{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.1414777349414636,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00014147773494146358,
"grad_norm": 1.199510931968689,
"learning_rate": 0.00019999950652018584,
"loss": 1.3974,
"step": 1
},
{
"epoch": 0.00028295546988292715,
"grad_norm": 2.689831495285034,
"learning_rate": 0.0001999980260856137,
"loss": 2.1709,
"step": 2
},
{
"epoch": 0.0004244332048243908,
"grad_norm": 3.1065266132354736,
"learning_rate": 0.000199995558710895,
"loss": 1.6518,
"step": 3
},
{
"epoch": 0.0005659109397658543,
"grad_norm": 2.5816352367401123,
"learning_rate": 0.00019999210442038162,
"loss": 1.1529,
"step": 4
},
{
"epoch": 0.0007073886747073179,
"grad_norm": 3.101250171661377,
"learning_rate": 0.00019998766324816607,
"loss": 0.6761,
"step": 5
},
{
"epoch": 0.0008488664096487816,
"grad_norm": 3.0736050605773926,
"learning_rate": 0.0001999822352380809,
"loss": 0.3526,
"step": 6
},
{
"epoch": 0.0009903441445902451,
"grad_norm": 4.289332866668701,
"learning_rate": 0.00019997582044369843,
"loss": 0.3733,
"step": 7
},
{
"epoch": 0.0011318218795317086,
"grad_norm": 3.8449699878692627,
"learning_rate": 0.00019996841892833,
"loss": 0.2397,
"step": 8
},
{
"epoch": 0.0012732996144731723,
"grad_norm": 7.1344475746154785,
"learning_rate": 0.00019996003076502565,
"loss": 0.3184,
"step": 9
},
{
"epoch": 0.0014147773494146358,
"grad_norm": 0.7714758515357971,
"learning_rate": 0.00019995065603657316,
"loss": 0.0618,
"step": 10
},
{
"epoch": 0.0015562550843560995,
"grad_norm": 1.3134876489639282,
"learning_rate": 0.0001999402948354973,
"loss": 0.0192,
"step": 11
},
{
"epoch": 0.0016977328192975631,
"grad_norm": 1.4721983671188354,
"learning_rate": 0.00019992894726405893,
"loss": 0.0161,
"step": 12
},
{
"epoch": 0.0018392105542390266,
"grad_norm": 0.1445562243461609,
"learning_rate": 0.000199916613434254,
"loss": 0.005,
"step": 13
},
{
"epoch": 0.0019806882891804903,
"grad_norm": 2.260159730911255,
"learning_rate": 0.0001999032934678125,
"loss": 0.0221,
"step": 14
},
{
"epoch": 0.002122166024121954,
"grad_norm": 0.06021130084991455,
"learning_rate": 0.00019988898749619702,
"loss": 0.0017,
"step": 15
},
{
"epoch": 0.0022636437590634172,
"grad_norm": 0.09766138345003128,
"learning_rate": 0.00019987369566060176,
"loss": 0.0013,
"step": 16
},
{
"epoch": 0.002405121494004881,
"grad_norm": 0.1634778529405594,
"learning_rate": 0.00019985741811195097,
"loss": 0.0025,
"step": 17
},
{
"epoch": 0.0025465992289463446,
"grad_norm": 2.076612949371338,
"learning_rate": 0.00019984015501089752,
"loss": 0.024,
"step": 18
},
{
"epoch": 0.0026880769638878083,
"grad_norm": 2.0792248249053955,
"learning_rate": 0.0001998219065278212,
"loss": 0.016,
"step": 19
},
{
"epoch": 0.0028295546988292715,
"grad_norm": 0.0070343296974897385,
"learning_rate": 0.00019980267284282717,
"loss": 0.0002,
"step": 20
},
{
"epoch": 0.0029710324337707352,
"grad_norm": 0.5132514834403992,
"learning_rate": 0.00019978245414574417,
"loss": 0.0027,
"step": 21
},
{
"epoch": 0.003112510168712199,
"grad_norm": 0.015104507096111774,
"learning_rate": 0.00019976125063612252,
"loss": 0.0003,
"step": 22
},
{
"epoch": 0.0032539879036536626,
"grad_norm": 0.03561875969171524,
"learning_rate": 0.00019973906252323238,
"loss": 0.0004,
"step": 23
},
{
"epoch": 0.0033954656385951263,
"grad_norm": 1.239057183265686,
"learning_rate": 0.0001997158900260614,
"loss": 0.0068,
"step": 24
},
{
"epoch": 0.0035369433735365895,
"grad_norm": 0.004514714702963829,
"learning_rate": 0.0001996917333733128,
"loss": 0.0001,
"step": 25
},
{
"epoch": 0.0036784211084780532,
"grad_norm": 0.0027808404993265867,
"learning_rate": 0.00019966659280340297,
"loss": 0.0001,
"step": 26
},
{
"epoch": 0.003819898843419517,
"grad_norm": 1.516070008277893,
"learning_rate": 0.00019964046856445924,
"loss": 0.0102,
"step": 27
},
{
"epoch": 0.003961376578360981,
"grad_norm": 0.058614715933799744,
"learning_rate": 0.00019961336091431727,
"loss": 0.0002,
"step": 28
},
{
"epoch": 0.004102854313302444,
"grad_norm": 0.12913420796394348,
"learning_rate": 0.00019958527012051857,
"loss": 0.0023,
"step": 29
},
{
"epoch": 0.004244332048243908,
"grad_norm": 0.010314574465155602,
"learning_rate": 0.00019955619646030802,
"loss": 0.0002,
"step": 30
},
{
"epoch": 0.004385809783185371,
"grad_norm": 0.0008627488277852535,
"learning_rate": 0.00019952614022063084,
"loss": 0.0,
"step": 31
},
{
"epoch": 0.0045272875181268345,
"grad_norm": null,
"learning_rate": 0.00019952614022063084,
"loss": 0.2811,
"step": 32
},
{
"epoch": 0.004668765253068298,
"grad_norm": 1.236807942390442,
"learning_rate": 0.00019949510169813003,
"loss": 0.0072,
"step": 33
},
{
"epoch": 0.004810242988009762,
"grad_norm": 0.003006057580932975,
"learning_rate": 0.00019946308119914323,
"loss": 0.0001,
"step": 34
},
{
"epoch": 0.0049517207229512255,
"grad_norm": 0.43032628297805786,
"learning_rate": 0.0001994300790396999,
"loss": 0.0035,
"step": 35
},
{
"epoch": 0.005093198457892689,
"grad_norm": 0.001233522198162973,
"learning_rate": 0.000199396095545518,
"loss": 0.0,
"step": 36
},
{
"epoch": 0.005234676192834153,
"grad_norm": 0.0023691693786531687,
"learning_rate": 0.00019936113105200085,
"loss": 0.0001,
"step": 37
},
{
"epoch": 0.005376153927775617,
"grad_norm": 0.025574322789907455,
"learning_rate": 0.00019932518590423394,
"loss": 0.0002,
"step": 38
},
{
"epoch": 0.00551763166271708,
"grad_norm": 0.09084252268075943,
"learning_rate": 0.00019928826045698136,
"loss": 0.0003,
"step": 39
},
{
"epoch": 0.005659109397658543,
"grad_norm": 1.9598499536514282,
"learning_rate": 0.0001992503550746824,
"loss": 0.0094,
"step": 40
},
{
"epoch": 0.005800587132600007,
"grad_norm": 0.001224718289449811,
"learning_rate": 0.0001992114701314478,
"loss": 0.0,
"step": 41
},
{
"epoch": 0.0059420648675414705,
"grad_norm": 0.0008977500256150961,
"learning_rate": 0.0001991716060110563,
"loss": 0.0,
"step": 42
},
{
"epoch": 0.006083542602482934,
"grad_norm": 0.000888747803401202,
"learning_rate": 0.00019913076310695068,
"loss": 0.0,
"step": 43
},
{
"epoch": 0.006225020337424398,
"grad_norm": 0.0025476876180619,
"learning_rate": 0.00019908894182223388,
"loss": 0.0001,
"step": 44
},
{
"epoch": 0.0063664980723658615,
"grad_norm": 0.0013534630415961146,
"learning_rate": 0.00019904614256966512,
"loss": 0.0,
"step": 45
},
{
"epoch": 0.006507975807307325,
"grad_norm": 0.0009306861320510507,
"learning_rate": 0.00019900236577165576,
"loss": 0.0001,
"step": 46
},
{
"epoch": 0.006649453542248789,
"grad_norm": 0.024325449019670486,
"learning_rate": 0.0001989576118602651,
"loss": 0.0004,
"step": 47
},
{
"epoch": 0.006790931277190253,
"grad_norm": 0.32905998826026917,
"learning_rate": 0.00019891188127719618,
"loss": 0.0031,
"step": 48
},
{
"epoch": 0.006932409012131715,
"grad_norm": 0.002287359442561865,
"learning_rate": 0.0001988651744737914,
"loss": 0.0001,
"step": 49
},
{
"epoch": 0.007073886747073179,
"grad_norm": 0.03360757231712341,
"learning_rate": 0.00019881749191102808,
"loss": 0.0004,
"step": 50
},
{
"epoch": 0.007215364482014643,
"grad_norm": 0.0011812775628641248,
"learning_rate": 0.00019876883405951377,
"loss": 0.0,
"step": 51
},
{
"epoch": 0.0073568422169561064,
"grad_norm": 0.014647467993199825,
"learning_rate": 0.00019871920139948192,
"loss": 0.0002,
"step": 52
},
{
"epoch": 0.00749831995189757,
"grad_norm": 0.021761754527688026,
"learning_rate": 0.0001986685944207868,
"loss": 0.0003,
"step": 53
},
{
"epoch": 0.007639797686839034,
"grad_norm": null,
"learning_rate": 0.0001986685944207868,
"loss": 0.9778,
"step": 54
},
{
"epoch": 0.0077812754217804975,
"grad_norm": 0.0026818220503628254,
"learning_rate": 0.0001986170136228989,
"loss": 0.0,
"step": 55
},
{
"epoch": 0.007922753156721961,
"grad_norm": 0.562186062335968,
"learning_rate": 0.00019856445951489982,
"loss": 0.096,
"step": 56
},
{
"epoch": 0.008064230891663424,
"grad_norm": 0.020677773281931877,
"learning_rate": 0.0001985109326154774,
"loss": 0.0005,
"step": 57
},
{
"epoch": 0.008205708626604889,
"grad_norm": 0.01492348127067089,
"learning_rate": 0.00019845643345292054,
"loss": 0.0003,
"step": 58
},
{
"epoch": 0.008347186361546351,
"grad_norm": 0.06748542934656143,
"learning_rate": 0.00019840096256511398,
"loss": 0.0011,
"step": 59
},
{
"epoch": 0.008488664096487816,
"grad_norm": 0.007372274994850159,
"learning_rate": 0.00019834452049953297,
"loss": 0.0001,
"step": 60
},
{
"epoch": 0.008630141831429279,
"grad_norm": 0.002576321130618453,
"learning_rate": 0.00019828710781323792,
"loss": 0.0001,
"step": 61
},
{
"epoch": 0.008771619566370742,
"grad_norm": 0.000969462504144758,
"learning_rate": 0.0001982287250728689,
"loss": 0.0,
"step": 62
},
{
"epoch": 0.008913097301312206,
"grad_norm": 0.031211236491799355,
"learning_rate": 0.0001981693728546399,
"loss": 0.0006,
"step": 63
},
{
"epoch": 0.009054575036253669,
"grad_norm": 0.0024660963099449873,
"learning_rate": 0.0001981090517443334,
"loss": 0.0001,
"step": 64
},
{
"epoch": 0.009196052771195133,
"grad_norm": 0.002796717919409275,
"learning_rate": 0.00019804776233729444,
"loss": 0.0001,
"step": 65
},
{
"epoch": 0.009337530506136596,
"grad_norm": 0.006275609601289034,
"learning_rate": 0.0001979855052384247,
"loss": 0.0002,
"step": 66
},
{
"epoch": 0.00947900824107806,
"grad_norm": 0.0006806784658692777,
"learning_rate": 0.00019792228106217658,
"loss": 0.0,
"step": 67
},
{
"epoch": 0.009620485976019524,
"grad_norm": 0.004091021604835987,
"learning_rate": 0.00019785809043254722,
"loss": 0.0002,
"step": 68
},
{
"epoch": 0.009761963710960988,
"grad_norm": 0.0047629051841795444,
"learning_rate": 0.0001977929339830722,
"loss": 0.0002,
"step": 69
},
{
"epoch": 0.009903441445902451,
"grad_norm": null,
"learning_rate": 0.0001977929339830722,
"loss": 6.0759,
"step": 70
},
{
"epoch": 0.010044919180843914,
"grad_norm": 0.001333454973064363,
"learning_rate": 0.00019772681235681936,
"loss": 0.0001,
"step": 71
},
{
"epoch": 0.010186396915785378,
"grad_norm": 0.02889617159962654,
"learning_rate": 0.00019765972620638248,
"loss": 0.0006,
"step": 72
},
{
"epoch": 0.010327874650726841,
"grad_norm": 0.004033127333968878,
"learning_rate": 0.00019759167619387476,
"loss": 0.0002,
"step": 73
},
{
"epoch": 0.010469352385668306,
"grad_norm": 0.003473761025816202,
"learning_rate": 0.00019752266299092236,
"loss": 0.0002,
"step": 74
},
{
"epoch": 0.010610830120609769,
"grad_norm": 0.0035185706801712513,
"learning_rate": 0.00019745268727865774,
"loss": 0.0002,
"step": 75
},
{
"epoch": 0.010752307855551233,
"grad_norm": 0.008891044184565544,
"learning_rate": 0.0001973817497477129,
"loss": 0.0003,
"step": 76
},
{
"epoch": 0.010893785590492696,
"grad_norm": 0.001775395474396646,
"learning_rate": 0.00019730985109821266,
"loss": 0.0001,
"step": 77
},
{
"epoch": 0.01103526332543416,
"grad_norm": 0.004444313235580921,
"learning_rate": 0.00019723699203976766,
"loss": 0.0002,
"step": 78
},
{
"epoch": 0.011176741060375623,
"grad_norm": 7.77717399597168,
"learning_rate": 0.0001971631732914674,
"loss": 0.1504,
"step": 79
},
{
"epoch": 0.011318218795317086,
"grad_norm": 0.004705169703811407,
"learning_rate": 0.0001970883955818731,
"loss": 0.0002,
"step": 80
},
{
"epoch": 0.01145969653025855,
"grad_norm": 0.03359982743859291,
"learning_rate": 0.0001970126596490106,
"loss": 0.0009,
"step": 81
},
{
"epoch": 0.011601174265200014,
"grad_norm": 0.01374754961580038,
"learning_rate": 0.00019693596624036292,
"loss": 0.0004,
"step": 82
},
{
"epoch": 0.011742652000141478,
"grad_norm": 0.8847774267196655,
"learning_rate": 0.0001968583161128631,
"loss": 0.0095,
"step": 83
},
{
"epoch": 0.011884129735082941,
"grad_norm": 0.23337610065937042,
"learning_rate": 0.00019677971003288655,
"loss": 0.0033,
"step": 84
},
{
"epoch": 0.012025607470024405,
"grad_norm": 0.13195404410362244,
"learning_rate": 0.00019670014877624353,
"loss": 0.0041,
"step": 85
},
{
"epoch": 0.012167085204965868,
"grad_norm": 0.10853024572134018,
"learning_rate": 0.00019661963312817148,
"loss": 0.002,
"step": 86
},
{
"epoch": 0.012308562939907333,
"grad_norm": 0.3537059724330902,
"learning_rate": 0.0001965381638833274,
"loss": 0.0033,
"step": 87
},
{
"epoch": 0.012450040674848796,
"grad_norm": 0.009482644498348236,
"learning_rate": 0.00019645574184577982,
"loss": 0.0004,
"step": 88
},
{
"epoch": 0.012591518409790258,
"grad_norm": 0.07353401929140091,
"learning_rate": 0.000196372367829001,
"loss": 0.0012,
"step": 89
},
{
"epoch": 0.012732996144731723,
"grad_norm": 0.38067692518234253,
"learning_rate": 0.00019628804265585877,
"loss": 0.0089,
"step": 90
},
{
"epoch": 0.012874473879673186,
"grad_norm": 0.0006739358650520444,
"learning_rate": 0.0001962027671586086,
"loss": 0.0,
"step": 91
},
{
"epoch": 0.01301595161461465,
"grad_norm": 20.609447479248047,
"learning_rate": 0.0001961165421788852,
"loss": 0.5171,
"step": 92
},
{
"epoch": 0.013157429349556113,
"grad_norm": 0.0008163169259205461,
"learning_rate": 0.0001960293685676943,
"loss": 0.0,
"step": 93
},
{
"epoch": 0.013298907084497578,
"grad_norm": 0.005171321332454681,
"learning_rate": 0.0001959412471854043,
"loss": 0.0001,
"step": 94
},
{
"epoch": 0.01344038481943904,
"grad_norm": 0.0035722258035093546,
"learning_rate": 0.0001958521789017376,
"loss": 0.0,
"step": 95
},
{
"epoch": 0.013581862554380505,
"grad_norm": 0.004428490065038204,
"learning_rate": 0.00019576216459576222,
"loss": 0.0,
"step": 96
},
{
"epoch": 0.013723340289321968,
"grad_norm": 0.0027291348669677973,
"learning_rate": 0.00019567120515588308,
"loss": 0.0001,
"step": 97
},
{
"epoch": 0.01386481802426343,
"grad_norm": 0.0007217226084321737,
"learning_rate": 0.00019557930147983302,
"loss": 0.0,
"step": 98
},
{
"epoch": 0.014006295759204895,
"grad_norm": 0.0516432449221611,
"learning_rate": 0.00019548645447466431,
"loss": 0.0004,
"step": 99
},
{
"epoch": 0.014147773494146358,
"grad_norm": 0.002289633499458432,
"learning_rate": 0.00019539266505673938,
"loss": 0.0001,
"step": 100
},
{
"epoch": 0.014289251229087823,
"grad_norm": 0.022498467937111855,
"learning_rate": 0.00019529793415172192,
"loss": 0.0002,
"step": 101
},
{
"epoch": 0.014430728964029286,
"grad_norm": 0.0024731154553592205,
"learning_rate": 0.00019520226269456768,
"loss": 0.0,
"step": 102
},
{
"epoch": 0.01457220669897075,
"grad_norm": 0.053761184215545654,
"learning_rate": 0.00019510565162951537,
"loss": 0.0003,
"step": 103
},
{
"epoch": 0.014713684433912213,
"grad_norm": 52.32304000854492,
"learning_rate": 0.00019500810191007718,
"loss": 0.5584,
"step": 104
},
{
"epoch": 0.014855162168853677,
"grad_norm": 0.0023259480949491262,
"learning_rate": 0.00019490961449902946,
"loss": 0.0,
"step": 105
},
{
"epoch": 0.01499663990379514,
"grad_norm": 0.00143586122430861,
"learning_rate": 0.0001948101903684032,
"loss": 0.0001,
"step": 106
},
{
"epoch": 0.015138117638736603,
"grad_norm": 0.0017758137546479702,
"learning_rate": 0.00019470983049947444,
"loss": 0.0001,
"step": 107
},
{
"epoch": 0.015279595373678068,
"grad_norm": 0.008223861455917358,
"learning_rate": 0.00019460853588275454,
"loss": 0.0001,
"step": 108
},
{
"epoch": 0.01542107310861953,
"grad_norm": 0.002562491688877344,
"learning_rate": 0.00019450630751798048,
"loss": 0.0001,
"step": 109
},
{
"epoch": 0.015562550843560995,
"grad_norm": 0.0011813660385087132,
"learning_rate": 0.000194403146414105,
"loss": 0.0,
"step": 110
},
{
"epoch": 0.01570402857850246,
"grad_norm": 0.001858884934335947,
"learning_rate": 0.00019429905358928646,
"loss": 0.0,
"step": 111
},
{
"epoch": 0.015845506313443922,
"grad_norm": 30.78669548034668,
"learning_rate": 0.00019419403007087907,
"loss": 0.0985,
"step": 112
},
{
"epoch": 0.015986984048385385,
"grad_norm": 0.007476686500012875,
"learning_rate": 0.00019408807689542257,
"loss": 0.0002,
"step": 113
},
{
"epoch": 0.016128461783326848,
"grad_norm": 0.0010357332648709416,
"learning_rate": 0.00019398119510863197,
"loss": 0.0,
"step": 114
},
{
"epoch": 0.01626993951826831,
"grad_norm": 0.001794468960724771,
"learning_rate": 0.00019387338576538744,
"loss": 0.0,
"step": 115
},
{
"epoch": 0.016411417253209777,
"grad_norm": 14.643719673156738,
"learning_rate": 0.00019376464992972356,
"loss": 0.4845,
"step": 116
},
{
"epoch": 0.01655289498815124,
"grad_norm": 10.48317813873291,
"learning_rate": 0.00019365498867481923,
"loss": 0.5411,
"step": 117
},
{
"epoch": 0.016694372723092703,
"grad_norm": 0.001224672538228333,
"learning_rate": 0.00019354440308298675,
"loss": 0.0,
"step": 118
},
{
"epoch": 0.016835850458034166,
"grad_norm": 0.0019753577653318644,
"learning_rate": 0.00019343289424566122,
"loss": 0.0,
"step": 119
},
{
"epoch": 0.016977328192975632,
"grad_norm": 0.006218386348336935,
"learning_rate": 0.00019332046326338986,
"loss": 0.0001,
"step": 120
},
{
"epoch": 0.017118805927917095,
"grad_norm": 0.004642636049538851,
"learning_rate": 0.0001932071112458211,
"loss": 0.0001,
"step": 121
},
{
"epoch": 0.017260283662858558,
"grad_norm": 0.004355429671704769,
"learning_rate": 0.00019309283931169356,
"loss": 0.0001,
"step": 122
},
{
"epoch": 0.01740176139780002,
"grad_norm": 0.006458196323364973,
"learning_rate": 0.00019297764858882514,
"loss": 0.0002,
"step": 123
},
{
"epoch": 0.017543239132741483,
"grad_norm": 0.014439636841416359,
"learning_rate": 0.00019286154021410173,
"loss": 0.0003,
"step": 124
},
{
"epoch": 0.01768471686768295,
"grad_norm": 0.008952593430876732,
"learning_rate": 0.00019274451533346615,
"loss": 0.0002,
"step": 125
},
{
"epoch": 0.017826194602624412,
"grad_norm": 0.00849517434835434,
"learning_rate": 0.00019262657510190666,
"loss": 0.0003,
"step": 126
},
{
"epoch": 0.017967672337565875,
"grad_norm": 0.21418528258800507,
"learning_rate": 0.0001925077206834458,
"loss": 0.0017,
"step": 127
},
{
"epoch": 0.018109150072507338,
"grad_norm": 0.0048170918598771095,
"learning_rate": 0.0001923879532511287,
"loss": 0.0001,
"step": 128
},
{
"epoch": 0.018250627807448804,
"grad_norm": 0.016278916969895363,
"learning_rate": 0.0001922672739870115,
"loss": 0.0003,
"step": 129
},
{
"epoch": 0.018392105542390267,
"grad_norm": 0.001837101997807622,
"learning_rate": 0.00019214568408214985,
"loss": 0.0001,
"step": 130
},
{
"epoch": 0.01853358327733173,
"grad_norm": 29.20079231262207,
"learning_rate": 0.00019202318473658705,
"loss": 7.677,
"step": 131
},
{
"epoch": 0.018675061012273193,
"grad_norm": 0.25970369577407837,
"learning_rate": 0.00019189977715934213,
"loss": 0.0016,
"step": 132
},
{
"epoch": 0.018816538747214655,
"grad_norm": 0.04528359696269035,
"learning_rate": 0.00019177546256839812,
"loss": 0.0004,
"step": 133
},
{
"epoch": 0.01895801648215612,
"grad_norm": 0.005976614076644182,
"learning_rate": 0.0001916502421906898,
"loss": 0.0001,
"step": 134
},
{
"epoch": 0.019099494217097585,
"grad_norm": 0.8626922965049744,
"learning_rate": 0.00019152411726209176,
"loss": 0.0374,
"step": 135
},
{
"epoch": 0.019240971952039047,
"grad_norm": 0.3195020854473114,
"learning_rate": 0.00019139708902740613,
"loss": 0.0096,
"step": 136
},
{
"epoch": 0.01938244968698051,
"grad_norm": 0.020532745867967606,
"learning_rate": 0.0001912691587403503,
"loss": 0.0009,
"step": 137
},
{
"epoch": 0.019523927421921976,
"grad_norm": 0.09249910712242126,
"learning_rate": 0.00019114032766354453,
"loss": 0.0015,
"step": 138
},
{
"epoch": 0.01966540515686344,
"grad_norm": 0.018980201333761215,
"learning_rate": 0.00019101059706849957,
"loss": 0.0009,
"step": 139
},
{
"epoch": 0.019806882891804902,
"grad_norm": 0.028726842254400253,
"learning_rate": 0.00019087996823560402,
"loss": 0.001,
"step": 140
},
{
"epoch": 0.019948360626746365,
"grad_norm": 0.007452077232301235,
"learning_rate": 0.0001907484424541117,
"loss": 0.0003,
"step": 141
},
{
"epoch": 0.020089838361687828,
"grad_norm": 0.021318119019269943,
"learning_rate": 0.00019061602102212898,
"loss": 0.0004,
"step": 142
},
{
"epoch": 0.020231316096629294,
"grad_norm": 6.968769073486328,
"learning_rate": 0.00019048270524660196,
"loss": 0.1187,
"step": 143
},
{
"epoch": 0.020372793831570757,
"grad_norm": 0.025080297142267227,
"learning_rate": 0.0001903484964433035,
"loss": 0.0005,
"step": 144
},
{
"epoch": 0.02051427156651222,
"grad_norm": 0.02062026597559452,
"learning_rate": 0.00019021339593682028,
"loss": 0.0008,
"step": 145
},
{
"epoch": 0.020655749301453682,
"grad_norm": 0.14519056677818298,
"learning_rate": 0.00019007740506053983,
"loss": 0.0027,
"step": 146
},
{
"epoch": 0.02079722703639515,
"grad_norm": 0.02562364749610424,
"learning_rate": 0.0001899405251566371,
"loss": 0.001,
"step": 147
},
{
"epoch": 0.02093870477133661,
"grad_norm": 0.018246619030833244,
"learning_rate": 0.00018980275757606157,
"loss": 0.0007,
"step": 148
},
{
"epoch": 0.021080182506278074,
"grad_norm": 0.005684189964085817,
"learning_rate": 0.00018966410367852362,
"loss": 0.0003,
"step": 149
},
{
"epoch": 0.021221660241219537,
"grad_norm": 0.42502322793006897,
"learning_rate": 0.00018952456483248119,
"loss": 0.0112,
"step": 150
},
{
"epoch": 0.021363137976161,
"grad_norm": 2.7156591415405273,
"learning_rate": 0.0001893841424151264,
"loss": 0.22,
"step": 151
},
{
"epoch": 0.021504615711102466,
"grad_norm": 0.004702973645180464,
"learning_rate": 0.0001892428378123718,
"loss": 0.0003,
"step": 152
},
{
"epoch": 0.02164609344604393,
"grad_norm": 0.003851711517199874,
"learning_rate": 0.0001891006524188368,
"loss": 0.0002,
"step": 153
},
{
"epoch": 0.021787571180985392,
"grad_norm": 0.004598681814968586,
"learning_rate": 0.00018895758763783383,
"loss": 0.0002,
"step": 154
},
{
"epoch": 0.021929048915926855,
"grad_norm": 3.063443183898926,
"learning_rate": 0.00018881364488135448,
"loss": 0.0125,
"step": 155
},
{
"epoch": 0.02207052665086832,
"grad_norm": 0.005910379812121391,
"learning_rate": 0.00018866882557005567,
"loss": 0.0002,
"step": 156
},
{
"epoch": 0.022212004385809784,
"grad_norm": 0.005766018759459257,
"learning_rate": 0.00018852313113324552,
"loss": 0.0003,
"step": 157
},
{
"epoch": 0.022353482120751247,
"grad_norm": 0.004247742705047131,
"learning_rate": 0.00018837656300886937,
"loss": 0.0002,
"step": 158
},
{
"epoch": 0.02249495985569271,
"grad_norm": 0.007876652292907238,
"learning_rate": 0.00018822912264349534,
"loss": 0.0004,
"step": 159
},
{
"epoch": 0.022636437590634172,
"grad_norm": 0.007455014158040285,
"learning_rate": 0.00018808081149230036,
"loss": 0.0004,
"step": 160
},
{
"epoch": 0.02277791532557564,
"grad_norm": 0.03861529007554054,
"learning_rate": 0.00018793163101905563,
"loss": 0.0006,
"step": 161
},
{
"epoch": 0.0229193930605171,
"grad_norm": 0.8474092483520508,
"learning_rate": 0.00018778158269611218,
"loss": 0.0075,
"step": 162
},
{
"epoch": 0.023060870795458564,
"grad_norm": 0.050449665635824203,
"learning_rate": 0.00018763066800438636,
"loss": 0.001,
"step": 163
},
{
"epoch": 0.023202348530400027,
"grad_norm": 0.007101717870682478,
"learning_rate": 0.0001874788884333453,
"loss": 0.0002,
"step": 164
},
{
"epoch": 0.023343826265341493,
"grad_norm": 5.2260355949401855,
"learning_rate": 0.00018732624548099204,
"loss": 0.0506,
"step": 165
},
{
"epoch": 0.023485304000282956,
"grad_norm": 0.03760479390621185,
"learning_rate": 0.0001871727406538509,
"loss": 0.0008,
"step": 166
},
{
"epoch": 0.02362678173522442,
"grad_norm": 0.02344103716313839,
"learning_rate": 0.0001870183754669526,
"loss": 0.0005,
"step": 167
},
{
"epoch": 0.023768259470165882,
"grad_norm": 0.007347187492996454,
"learning_rate": 0.00018686315144381913,
"loss": 0.0003,
"step": 168
},
{
"epoch": 0.023909737205107345,
"grad_norm": 4.132694244384766,
"learning_rate": 0.000186707070116449,
"loss": 0.0651,
"step": 169
},
{
"epoch": 0.02405121494004881,
"grad_norm": 8.260241508483887,
"learning_rate": 0.0001865501330253019,
"loss": 0.1644,
"step": 170
},
{
"epoch": 0.024192692674990274,
"grad_norm": 0.0025873123668134212,
"learning_rate": 0.00018639234171928353,
"loss": 0.0002,
"step": 171
},
{
"epoch": 0.024334170409931737,
"grad_norm": 0.022175127640366554,
"learning_rate": 0.0001862336977557304,
"loss": 0.0005,
"step": 172
},
{
"epoch": 0.0244756481448732,
"grad_norm": 0.0020805918611586094,
"learning_rate": 0.0001860742027003944,
"loss": 0.0001,
"step": 173
},
{
"epoch": 0.024617125879814666,
"grad_norm": 0.00254829996265471,
"learning_rate": 0.00018591385812742725,
"loss": 0.0001,
"step": 174
},
{
"epoch": 0.02475860361475613,
"grad_norm": 0.003948370926082134,
"learning_rate": 0.00018575266561936523,
"loss": 0.0002,
"step": 175
},
{
"epoch": 0.02490008134969759,
"grad_norm": 62.64409637451172,
"learning_rate": 0.00018559062676711332,
"loss": 0.2596,
"step": 176
},
{
"epoch": 0.025041559084639054,
"grad_norm": 0.003057444002479315,
"learning_rate": 0.0001854277431699295,
"loss": 0.0002,
"step": 177
},
{
"epoch": 0.025183036819580517,
"grad_norm": 0.002523846924304962,
"learning_rate": 0.00018526401643540922,
"loss": 0.0001,
"step": 178
},
{
"epoch": 0.025324514554521983,
"grad_norm": 0.022342350333929062,
"learning_rate": 0.00018509944817946922,
"loss": 0.0006,
"step": 179
},
{
"epoch": 0.025465992289463446,
"grad_norm": 0.7193881273269653,
"learning_rate": 0.00018493404002633166,
"loss": 0.0397,
"step": 180
},
{
"epoch": 0.02560747002440491,
"grad_norm": 0.0033624654170125723,
"learning_rate": 0.00018476779360850832,
"loss": 0.0001,
"step": 181
},
{
"epoch": 0.02574894775934637,
"grad_norm": 4.625955581665039,
"learning_rate": 0.00018460071056678422,
"loss": 1.1598,
"step": 182
},
{
"epoch": 0.025890425494287838,
"grad_norm": 0.002986474661156535,
"learning_rate": 0.00018443279255020152,
"loss": 0.0002,
"step": 183
},
{
"epoch": 0.0260319032292293,
"grad_norm": 0.0038495738990604877,
"learning_rate": 0.00018426404121604323,
"loss": 0.0003,
"step": 184
},
{
"epoch": 0.026173380964170764,
"grad_norm": 0.003486146917566657,
"learning_rate": 0.00018409445822981693,
"loss": 0.0003,
"step": 185
},
{
"epoch": 0.026314858699112226,
"grad_norm": 0.014306096360087395,
"learning_rate": 0.00018392404526523817,
"loss": 0.0011,
"step": 186
},
{
"epoch": 0.02645633643405369,
"grad_norm": 0.01424553245306015,
"learning_rate": 0.0001837528040042142,
"loss": 0.001,
"step": 187
},
{
"epoch": 0.026597814168995156,
"grad_norm": 0.011042344383895397,
"learning_rate": 0.00018358073613682706,
"loss": 0.0008,
"step": 188
},
{
"epoch": 0.02673929190393662,
"grad_norm": 0.034611593931913376,
"learning_rate": 0.00018340784336131713,
"loss": 0.0017,
"step": 189
},
{
"epoch": 0.02688076963887808,
"grad_norm": 0.00807161070406437,
"learning_rate": 0.00018323412738406635,
"loss": 0.0006,
"step": 190
},
{
"epoch": 0.027022247373819544,
"grad_norm": 0.015202701091766357,
"learning_rate": 0.00018305958991958127,
"loss": 0.0009,
"step": 191
},
{
"epoch": 0.02716372510876101,
"grad_norm": 0.11597127467393875,
"learning_rate": 0.0001828842326904762,
"loss": 0.0045,
"step": 192
},
{
"epoch": 0.027305202843702473,
"grad_norm": 0.13688474893569946,
"learning_rate": 0.00018270805742745617,
"loss": 0.0021,
"step": 193
},
{
"epoch": 0.027446680578643936,
"grad_norm": 0.02395058050751686,
"learning_rate": 0.00018253106586929997,
"loss": 0.0015,
"step": 194
},
{
"epoch": 0.0275881583135854,
"grad_norm": 0.024011526256799698,
"learning_rate": 0.00018235325976284275,
"loss": 0.0017,
"step": 195
},
{
"epoch": 0.02772963604852686,
"grad_norm": 0.015601936727762222,
"learning_rate": 0.00018217464086295904,
"loss": 0.0008,
"step": 196
},
{
"epoch": 0.027871113783468328,
"grad_norm": 0.008236625231802464,
"learning_rate": 0.00018199521093254523,
"loss": 0.0007,
"step": 197
},
{
"epoch": 0.02801259151840979,
"grad_norm": 0.007339195813983679,
"learning_rate": 0.00018181497174250236,
"loss": 0.0006,
"step": 198
},
{
"epoch": 0.028154069253351253,
"grad_norm": 0.03408842161297798,
"learning_rate": 0.00018163392507171842,
"loss": 0.0013,
"step": 199
},
{
"epoch": 0.028295546988292716,
"grad_norm": 0.010708988644182682,
"learning_rate": 0.00018145207270705096,
"loss": 0.0008,
"step": 200
},
{
"epoch": 0.028437024723234183,
"grad_norm": 0.04180416092276573,
"learning_rate": 0.0001812694164433094,
"loss": 0.0008,
"step": 201
},
{
"epoch": 0.028578502458175645,
"grad_norm": 0.015216993167996407,
"learning_rate": 0.00018108595808323736,
"loss": 0.0004,
"step": 202
},
{
"epoch": 0.028719980193117108,
"grad_norm": 0.023775571957230568,
"learning_rate": 0.00018090169943749476,
"loss": 0.0014,
"step": 203
},
{
"epoch": 0.02886145792805857,
"grad_norm": 0.007402106188237667,
"learning_rate": 0.00018071664232464002,
"loss": 0.0005,
"step": 204
},
{
"epoch": 0.029002935663000034,
"grad_norm": 0.051116202026605606,
"learning_rate": 0.0001805307885711122,
"loss": 0.0011,
"step": 205
},
{
"epoch": 0.0291444133979415,
"grad_norm": 0.006247265264391899,
"learning_rate": 0.00018034414001121278,
"loss": 0.0004,
"step": 206
},
{
"epoch": 0.029285891132882963,
"grad_norm": 0.006299880333244801,
"learning_rate": 0.00018015669848708767,
"loss": 0.0004,
"step": 207
},
{
"epoch": 0.029427368867824426,
"grad_norm": 0.11377950757741928,
"learning_rate": 0.00017996846584870908,
"loss": 0.0009,
"step": 208
},
{
"epoch": 0.02956884660276589,
"grad_norm": 0.0020067133009433746,
"learning_rate": 0.0001797794439538571,
"loss": 0.0002,
"step": 209
},
{
"epoch": 0.029710324337707355,
"grad_norm": 0.40424010157585144,
"learning_rate": 0.0001795896346681016,
"loss": 0.0108,
"step": 210
},
{
"epoch": 0.029851802072648818,
"grad_norm": 0.0018006038153544068,
"learning_rate": 0.00017939903986478355,
"loss": 0.0001,
"step": 211
},
{
"epoch": 0.02999327980759028,
"grad_norm": 0.00778124388307333,
"learning_rate": 0.00017920766142499672,
"loss": 0.0004,
"step": 212
},
{
"epoch": 0.030134757542531743,
"grad_norm": 0.0031390658114105463,
"learning_rate": 0.00017901550123756906,
"loss": 0.0001,
"step": 213
},
{
"epoch": 0.030276235277473206,
"grad_norm": 0.03861818090081215,
"learning_rate": 0.00017882256119904403,
"loss": 0.0015,
"step": 214
},
{
"epoch": 0.030417713012414672,
"grad_norm": 0.6883278489112854,
"learning_rate": 0.00017862884321366188,
"loss": 0.004,
"step": 215
},
{
"epoch": 0.030559190747356135,
"grad_norm": 0.0016816803254187107,
"learning_rate": 0.000178434349193341,
"loss": 0.0001,
"step": 216
},
{
"epoch": 0.030700668482297598,
"grad_norm": 0.022568391636013985,
"learning_rate": 0.0001782390810576588,
"loss": 0.0007,
"step": 217
},
{
"epoch": 0.03084214621723906,
"grad_norm": 0.001979145687073469,
"learning_rate": 0.000178043040733833,
"loss": 0.0001,
"step": 218
},
{
"epoch": 0.030983623952180527,
"grad_norm": 0.006649916060268879,
"learning_rate": 0.00017784623015670238,
"loss": 0.0002,
"step": 219
},
{
"epoch": 0.03112510168712199,
"grad_norm": 0.0012332225451245904,
"learning_rate": 0.00017764865126870786,
"loss": 0.0001,
"step": 220
},
{
"epoch": 0.03126657942206345,
"grad_norm": 0.0019407098880037665,
"learning_rate": 0.00017745030601987337,
"loss": 0.0001,
"step": 221
},
{
"epoch": 0.03140805715700492,
"grad_norm": 0.0025570588186383247,
"learning_rate": 0.00017725119636778644,
"loss": 0.0001,
"step": 222
},
{
"epoch": 0.03154953489194638,
"grad_norm": 10.877803802490234,
"learning_rate": 0.00017705132427757895,
"loss": 0.2045,
"step": 223
},
{
"epoch": 0.031691012626887845,
"grad_norm": 0.001555653172545135,
"learning_rate": 0.00017685069172190766,
"loss": 0.0001,
"step": 224
},
{
"epoch": 0.03183249036182931,
"grad_norm": 0.0014934063656255603,
"learning_rate": 0.00017664930068093498,
"loss": 0.0001,
"step": 225
},
{
"epoch": 0.03197396809677077,
"grad_norm": 0.0034368853084743023,
"learning_rate": 0.00017644715314230918,
"loss": 0.0001,
"step": 226
},
{
"epoch": 0.03211544583171223,
"grad_norm": 0.0021537458524107933,
"learning_rate": 0.0001762442511011448,
"loss": 0.0001,
"step": 227
},
{
"epoch": 0.032256923566653696,
"grad_norm": 0.002286655129864812,
"learning_rate": 0.0001760405965600031,
"loss": 0.0001,
"step": 228
},
{
"epoch": 0.03239840130159516,
"grad_norm": 0.005433680955320597,
"learning_rate": 0.0001758361915288722,
"loss": 0.0003,
"step": 229
},
{
"epoch": 0.03253987903653662,
"grad_norm": 0.0013771787052974105,
"learning_rate": 0.0001756310380251472,
"loss": 0.0001,
"step": 230
},
{
"epoch": 0.03268135677147809,
"grad_norm": 0.0018148127710446715,
"learning_rate": 0.00017542513807361037,
"loss": 0.0001,
"step": 231
},
{
"epoch": 0.032822834506419554,
"grad_norm": 0.00181623047683388,
"learning_rate": 0.00017521849370641114,
"loss": 0.0001,
"step": 232
},
{
"epoch": 0.03296431224136102,
"grad_norm": 0.01041108276695013,
"learning_rate": 0.00017501110696304596,
"loss": 0.0003,
"step": 233
},
{
"epoch": 0.03310578997630248,
"grad_norm": 0.004963583778589964,
"learning_rate": 0.00017480297989033825,
"loss": 0.0002,
"step": 234
},
{
"epoch": 0.03324726771124394,
"grad_norm": 0.007771691773086786,
"learning_rate": 0.00017459411454241822,
"loss": 0.0003,
"step": 235
},
{
"epoch": 0.033388745446185406,
"grad_norm": 0.0008118674159049988,
"learning_rate": 0.00017438451298070252,
"loss": 0.0001,
"step": 236
},
{
"epoch": 0.03353022318112687,
"grad_norm": 0.004826071672141552,
"learning_rate": 0.00017417417727387394,
"loss": 0.0001,
"step": 237
},
{
"epoch": 0.03367170091606833,
"grad_norm": 0.0054870243184268475,
"learning_rate": 0.000173963109497861,
"loss": 0.0001,
"step": 238
},
{
"epoch": 0.033813178651009794,
"grad_norm": 0.31345561146736145,
"learning_rate": 0.0001737513117358174,
"loss": 0.0022,
"step": 239
},
{
"epoch": 0.033954656385951264,
"grad_norm": 0.0009997932938858867,
"learning_rate": 0.0001735387860781016,
"loss": 0.0,
"step": 240
},
{
"epoch": 0.03409613412089273,
"grad_norm": 0.0014913649065420032,
"learning_rate": 0.00017332553462225602,
"loss": 0.0001,
"step": 241
},
{
"epoch": 0.03423761185583419,
"grad_norm": 0.001048844656907022,
"learning_rate": 0.00017311155947298643,
"loss": 0.0001,
"step": 242
},
{
"epoch": 0.03437908959077565,
"grad_norm": 0.005004017613828182,
"learning_rate": 0.00017289686274214118,
"loss": 0.0001,
"step": 243
},
{
"epoch": 0.034520567325717115,
"grad_norm": 0.0016325002070516348,
"learning_rate": 0.0001726814465486903,
"loss": 0.0001,
"step": 244
},
{
"epoch": 0.03466204506065858,
"grad_norm": 2.20041561126709,
"learning_rate": 0.0001724653130187047,
"loss": 0.1624,
"step": 245
},
{
"epoch": 0.03480352279560004,
"grad_norm": 0.0004976244526915252,
"learning_rate": 0.00017224846428533499,
"loss": 0.0,
"step": 246
},
{
"epoch": 0.0349450005305415,
"grad_norm": 0.0007265186868607998,
"learning_rate": 0.0001720309024887907,
"loss": 0.0001,
"step": 247
},
{
"epoch": 0.035086478265482966,
"grad_norm": 0.03309307619929314,
"learning_rate": 0.00017181262977631888,
"loss": 0.0,
"step": 248
},
{
"epoch": 0.035227956000424436,
"grad_norm": 0.0008523318101651967,
"learning_rate": 0.00017159364830218312,
"loss": 0.0001,
"step": 249
},
{
"epoch": 0.0353694337353659,
"grad_norm": 0.0026479423977434635,
"learning_rate": 0.00017137396022764214,
"loss": 0.0002,
"step": 250
},
{
"epoch": 0.03551091147030736,
"grad_norm": 0.0009089628583751619,
"learning_rate": 0.00017115356772092857,
"loss": 0.0001,
"step": 251
},
{
"epoch": 0.035652389205248824,
"grad_norm": 0.001088530640117824,
"learning_rate": 0.0001709324729572274,
"loss": 0.0001,
"step": 252
},
{
"epoch": 0.03579386694019029,
"grad_norm": 0.0062820520251989365,
"learning_rate": 0.00017071067811865476,
"loss": 0.0001,
"step": 253
},
{
"epoch": 0.03593534467513175,
"grad_norm": 0.09086458384990692,
"learning_rate": 0.00017048818539423615,
"loss": 0.0018,
"step": 254
},
{
"epoch": 0.03607682241007321,
"grad_norm": 26.014320373535156,
"learning_rate": 0.00017026499697988493,
"loss": 3.5957,
"step": 255
},
{
"epoch": 0.036218300145014676,
"grad_norm": 0.0020564054138958454,
"learning_rate": 0.00017004111507838064,
"loss": 0.0002,
"step": 256
},
{
"epoch": 0.03635977787995614,
"grad_norm": 0.0016233263304457068,
"learning_rate": 0.00016981654189934727,
"loss": 0.0001,
"step": 257
},
{
"epoch": 0.03650125561489761,
"grad_norm": 0.0012816754169762135,
"learning_rate": 0.00016959127965923142,
"loss": 0.0001,
"step": 258
},
{
"epoch": 0.03664273334983907,
"grad_norm": 0.0034658850636333227,
"learning_rate": 0.0001693653305812805,
"loss": 0.0002,
"step": 259
},
{
"epoch": 0.036784211084780534,
"grad_norm": 0.02739444188773632,
"learning_rate": 0.00016913869689552064,
"loss": 0.0003,
"step": 260
},
{
"epoch": 0.036925688819722,
"grad_norm": 0.011004602536559105,
"learning_rate": 0.00016891138083873487,
"loss": 0.0005,
"step": 261
},
{
"epoch": 0.03706716655466346,
"grad_norm": 0.0057389517314732075,
"learning_rate": 0.00016868338465444085,
"loss": 0.0002,
"step": 262
},
{
"epoch": 0.03720864428960492,
"grad_norm": 3.8611650466918945,
"learning_rate": 0.00016845471059286887,
"loss": 1.4247,
"step": 263
},
{
"epoch": 0.037350122024546385,
"grad_norm": 0.01210610382258892,
"learning_rate": 0.00016822536091093965,
"loss": 0.0004,
"step": 264
},
{
"epoch": 0.03749159975948785,
"grad_norm": 0.013306356966495514,
"learning_rate": 0.00016799533787224192,
"loss": 0.0008,
"step": 265
},
{
"epoch": 0.03763307749442931,
"grad_norm": 0.27052658796310425,
"learning_rate": 0.00016776464374701025,
"loss": 0.0099,
"step": 266
},
{
"epoch": 0.03777455522937078,
"grad_norm": 0.008403144776821136,
"learning_rate": 0.00016753328081210245,
"loss": 0.0005,
"step": 267
},
{
"epoch": 0.03791603296431224,
"grad_norm": 0.13167297840118408,
"learning_rate": 0.00016730125135097735,
"loss": 0.0027,
"step": 268
},
{
"epoch": 0.038057510699253706,
"grad_norm": 0.026871370151638985,
"learning_rate": 0.000167068557653672,
"loss": 0.001,
"step": 269
},
{
"epoch": 0.03819898843419517,
"grad_norm": 0.03469863906502724,
"learning_rate": 0.0001668352020167793,
"loss": 0.0015,
"step": 270
},
{
"epoch": 0.03834046616913663,
"grad_norm": 11.140663146972656,
"learning_rate": 0.00016660118674342517,
"loss": 0.0507,
"step": 271
},
{
"epoch": 0.038481943904078095,
"grad_norm": 0.062388550490140915,
"learning_rate": 0.00016636651414324587,
"loss": 0.0023,
"step": 272
},
{
"epoch": 0.03862342163901956,
"grad_norm": 0.009884512983262539,
"learning_rate": 0.00016613118653236518,
"loss": 0.0006,
"step": 273
},
{
"epoch": 0.03876489937396102,
"grad_norm": 0.011747513897716999,
"learning_rate": 0.0001658952062333717,
"loss": 0.0007,
"step": 274
},
{
"epoch": 0.03890637710890248,
"grad_norm": 0.016286730766296387,
"learning_rate": 0.00016565857557529566,
"loss": 0.0007,
"step": 275
},
{
"epoch": 0.03904785484384395,
"grad_norm": 0.008195940405130386,
"learning_rate": 0.00016542129689358612,
"loss": 0.0005,
"step": 276
},
{
"epoch": 0.039189332578785416,
"grad_norm": 0.14791356027126312,
"learning_rate": 0.0001651833725300879,
"loss": 0.0012,
"step": 277
},
{
"epoch": 0.03933081031372688,
"grad_norm": 0.04902546480298042,
"learning_rate": 0.00016494480483301836,
"loss": 0.0014,
"step": 278
},
{
"epoch": 0.03947228804866834,
"grad_norm": 0.08834591507911682,
"learning_rate": 0.00016470559615694446,
"loss": 0.0009,
"step": 279
},
{
"epoch": 0.039613765783609804,
"grad_norm": 0.02635873667895794,
"learning_rate": 0.00016446574886275913,
"loss": 0.0006,
"step": 280
},
{
"epoch": 0.03975524351855127,
"grad_norm": 0.005103811621665955,
"learning_rate": 0.00016422526531765846,
"loss": 0.0003,
"step": 281
},
{
"epoch": 0.03989672125349273,
"grad_norm": 0.004541166592389345,
"learning_rate": 0.00016398414789511786,
"loss": 0.0004,
"step": 282
},
{
"epoch": 0.04003819898843419,
"grad_norm": 0.0033859983086586,
"learning_rate": 0.000163742398974869,
"loss": 0.0002,
"step": 283
},
{
"epoch": 0.040179676723375655,
"grad_norm": 0.003963902592658997,
"learning_rate": 0.00016350002094287609,
"loss": 0.0003,
"step": 284
},
{
"epoch": 0.040321154458317125,
"grad_norm": 0.005432737059891224,
"learning_rate": 0.00016325701619131246,
"loss": 0.0003,
"step": 285
},
{
"epoch": 0.04046263219325859,
"grad_norm": 0.014246117323637009,
"learning_rate": 0.00016301338711853693,
"loss": 0.0003,
"step": 286
},
{
"epoch": 0.04060410992820005,
"grad_norm": 0.060390155762434006,
"learning_rate": 0.00016276913612907007,
"loss": 0.0006,
"step": 287
},
{
"epoch": 0.040745587663141514,
"grad_norm": 0.003241261001676321,
"learning_rate": 0.00016252426563357055,
"loss": 0.0002,
"step": 288
},
{
"epoch": 0.040887065398082977,
"grad_norm": 0.0031794258393347263,
"learning_rate": 0.00016227877804881127,
"loss": 0.0003,
"step": 289
},
{
"epoch": 0.04102854313302444,
"grad_norm": 0.008073396049439907,
"learning_rate": 0.00016203267579765563,
"loss": 0.0002,
"step": 290
},
{
"epoch": 0.0411700208679659,
"grad_norm": 0.002360474318265915,
"learning_rate": 0.00016178596130903344,
"loss": 0.0002,
"step": 291
},
{
"epoch": 0.041311498602907365,
"grad_norm": 0.004607622046023607,
"learning_rate": 0.00016153863701791717,
"loss": 0.0003,
"step": 292
},
{
"epoch": 0.04145297633784883,
"grad_norm": 0.002519590314477682,
"learning_rate": 0.00016129070536529766,
"loss": 0.0002,
"step": 293
},
{
"epoch": 0.0415944540727903,
"grad_norm": 0.001880524680018425,
"learning_rate": 0.00016104216879816026,
"loss": 0.0001,
"step": 294
},
{
"epoch": 0.04173593180773176,
"grad_norm": 0.0013052290305495262,
"learning_rate": 0.00016079302976946055,
"loss": 0.0001,
"step": 295
},
{
"epoch": 0.04187740954267322,
"grad_norm": 0.0029020386282354593,
"learning_rate": 0.00016054329073810015,
"loss": 0.0002,
"step": 296
},
{
"epoch": 0.042018887277614686,
"grad_norm": 0.0025680111721158028,
"learning_rate": 0.00016029295416890248,
"loss": 0.0002,
"step": 297
},
{
"epoch": 0.04216036501255615,
"grad_norm": 0.0024828522000461817,
"learning_rate": 0.00016004202253258842,
"loss": 0.0001,
"step": 298
},
{
"epoch": 0.04230184274749761,
"grad_norm": 0.0020577250979840755,
"learning_rate": 0.0001597904983057519,
"loss": 0.0001,
"step": 299
},
{
"epoch": 0.042443320482439074,
"grad_norm": 0.0016125338152050972,
"learning_rate": 0.00015953838397083552,
"loss": 0.0001,
"step": 300
},
{
"epoch": 0.04258479821738054,
"grad_norm": 0.001345417695119977,
"learning_rate": 0.00015928568201610595,
"loss": 0.0001,
"step": 301
},
{
"epoch": 0.042726275952322,
"grad_norm": 0.0021328881848603487,
"learning_rate": 0.00015903239493562948,
"loss": 0.0001,
"step": 302
},
{
"epoch": 0.04286775368726347,
"grad_norm": 0.0024942061863839626,
"learning_rate": 0.00015877852522924732,
"loss": 0.0001,
"step": 303
},
{
"epoch": 0.04300923142220493,
"grad_norm": 0.004291863180696964,
"learning_rate": 0.00015852407540255104,
"loss": 0.0002,
"step": 304
},
{
"epoch": 0.043150709157146395,
"grad_norm": 0.005543672014027834,
"learning_rate": 0.00015826904796685762,
"loss": 0.0003,
"step": 305
},
{
"epoch": 0.04329218689208786,
"grad_norm": 0.002027965849265456,
"learning_rate": 0.00015801344543918495,
"loss": 0.0001,
"step": 306
},
{
"epoch": 0.04343366462702932,
"grad_norm": 0.002267736243084073,
"learning_rate": 0.00015775727034222675,
"loss": 0.0001,
"step": 307
},
{
"epoch": 0.043575142361970784,
"grad_norm": 0.02030862867832184,
"learning_rate": 0.00015750052520432787,
"loss": 0.0004,
"step": 308
},
{
"epoch": 0.04371662009691225,
"grad_norm": 0.005449316464364529,
"learning_rate": 0.0001572432125594591,
"loss": 0.0002,
"step": 309
},
{
"epoch": 0.04385809783185371,
"grad_norm": 0.0010752433445304632,
"learning_rate": 0.00015698533494719238,
"loss": 0.0001,
"step": 310
},
{
"epoch": 0.04399957556679517,
"grad_norm": 0.0009665637626312673,
"learning_rate": 0.00015672689491267567,
"loss": 0.0001,
"step": 311
},
{
"epoch": 0.04414105330173664,
"grad_norm": 0.0021984372287988663,
"learning_rate": 0.00015646789500660773,
"loss": 0.0001,
"step": 312
},
{
"epoch": 0.044282531036678105,
"grad_norm": 0.0006756667862646282,
"learning_rate": 0.00015620833778521307,
"loss": 0.0001,
"step": 313
},
{
"epoch": 0.04442400877161957,
"grad_norm": 0.0008639508741907775,
"learning_rate": 0.0001559482258102167,
"loss": 0.0001,
"step": 314
},
{
"epoch": 0.04456548650656103,
"grad_norm": 0.0005970299243927002,
"learning_rate": 0.00015568756164881882,
"loss": 0.0,
"step": 315
},
{
"epoch": 0.04470696424150249,
"grad_norm": 0.0009843648876994848,
"learning_rate": 0.00015542634787366942,
"loss": 0.0001,
"step": 316
},
{
"epoch": 0.044848441976443956,
"grad_norm": 0.0009396934765391052,
"learning_rate": 0.00015516458706284303,
"loss": 0.0001,
"step": 317
},
{
"epoch": 0.04498991971138542,
"grad_norm": 0.0009946058271452785,
"learning_rate": 0.0001549022817998132,
"loss": 0.0001,
"step": 318
},
{
"epoch": 0.04513139744632688,
"grad_norm": 0.0008691767579875886,
"learning_rate": 0.00015463943467342693,
"loss": 0.0001,
"step": 319
},
{
"epoch": 0.045272875181268345,
"grad_norm": 0.001260230434127152,
"learning_rate": 0.00015437604827787927,
"loss": 0.0001,
"step": 320
},
{
"epoch": 0.045414352916209814,
"grad_norm": 0.0007110429578460753,
"learning_rate": 0.00015411212521268758,
"loss": 0.0001,
"step": 321
},
{
"epoch": 0.04555583065115128,
"grad_norm": 0.011615327559411526,
"learning_rate": 0.00015384766808266602,
"loss": 0.0003,
"step": 322
},
{
"epoch": 0.04569730838609274,
"grad_norm": 0.000552054145373404,
"learning_rate": 0.00015358267949789966,
"loss": 0.0,
"step": 323
},
{
"epoch": 0.0458387861210342,
"grad_norm": 0.001125704264268279,
"learning_rate": 0.00015331716207371888,
"loss": 0.0001,
"step": 324
},
{
"epoch": 0.045980263855975666,
"grad_norm": 0.0021504261530935764,
"learning_rate": 0.0001530511184306734,
"loss": 0.0001,
"step": 325
},
{
"epoch": 0.04612174159091713,
"grad_norm": 0.002731619169935584,
"learning_rate": 0.00015278455119450664,
"loss": 0.0001,
"step": 326
},
{
"epoch": 0.04626321932585859,
"grad_norm": 0.0007831475813873112,
"learning_rate": 0.0001525174629961296,
"loss": 0.0001,
"step": 327
},
{
"epoch": 0.046404697060800054,
"grad_norm": 0.20330454409122467,
"learning_rate": 0.0001522498564715949,
"loss": 0.0041,
"step": 328
},
{
"epoch": 0.04654617479574152,
"grad_norm": 0.0007988195866346359,
"learning_rate": 0.00015198173426207094,
"loss": 0.0001,
"step": 329
},
{
"epoch": 0.04668765253068299,
"grad_norm": 17.48381805419922,
"learning_rate": 0.00015171309901381572,
"loss": 0.2524,
"step": 330
},
{
"epoch": 0.04682913026562445,
"grad_norm": 0.0008586746989749372,
"learning_rate": 0.00015144395337815064,
"loss": 0.0001,
"step": 331
},
{
"epoch": 0.04697060800056591,
"grad_norm": 0.0005710547557100654,
"learning_rate": 0.00015117430001143452,
"loss": 0.0,
"step": 332
},
{
"epoch": 0.047112085735507375,
"grad_norm": 0.0006703097024001181,
"learning_rate": 0.00015090414157503714,
"loss": 0.0001,
"step": 333
},
{
"epoch": 0.04725356347044884,
"grad_norm": 0.002217133529484272,
"learning_rate": 0.00015063348073531324,
"loss": 0.0001,
"step": 334
},
{
"epoch": 0.0473950412053903,
"grad_norm": 3.155515193939209,
"learning_rate": 0.0001503623201635761,
"loss": 1.6251,
"step": 335
},
{
"epoch": 0.047536518940331764,
"grad_norm": 0.004736212082207203,
"learning_rate": 0.000150090662536071,
"loss": 0.0003,
"step": 336
},
{
"epoch": 0.047677996675273226,
"grad_norm": 0.002677865093573928,
"learning_rate": 0.0001498185105339491,
"loss": 0.0001,
"step": 337
},
{
"epoch": 0.04781947441021469,
"grad_norm": 0.0027205327060073614,
"learning_rate": 0.00014954586684324078,
"loss": 0.0002,
"step": 338
},
{
"epoch": 0.04796095214515616,
"grad_norm": 0.009435261599719524,
"learning_rate": 0.00014927273415482915,
"loss": 0.0006,
"step": 339
},
{
"epoch": 0.04810242988009762,
"grad_norm": 0.0023822993971407413,
"learning_rate": 0.00014899911516442365,
"loss": 0.0002,
"step": 340
},
{
"epoch": 0.048243907615039085,
"grad_norm": 0.0032713001128286123,
"learning_rate": 0.00014872501257253323,
"loss": 0.0003,
"step": 341
},
{
"epoch": 0.04838538534998055,
"grad_norm": 0.006985257379710674,
"learning_rate": 0.0001484504290844398,
"loss": 0.0005,
"step": 342
},
{
"epoch": 0.04852686308492201,
"grad_norm": 0.005185326095670462,
"learning_rate": 0.00014817536741017152,
"loss": 0.0004,
"step": 343
},
{
"epoch": 0.04866834081986347,
"grad_norm": 0.010701478458940983,
"learning_rate": 0.00014789983026447612,
"loss": 0.0007,
"step": 344
},
{
"epoch": 0.048809818554804936,
"grad_norm": 0.006417698226869106,
"learning_rate": 0.0001476238203667939,
"loss": 0.0005,
"step": 345
},
{
"epoch": 0.0489512962897464,
"grad_norm": 0.022306060418486595,
"learning_rate": 0.0001473473404412312,
"loss": 0.0016,
"step": 346
},
{
"epoch": 0.04909277402468786,
"grad_norm": 0.01026198361068964,
"learning_rate": 0.0001470703932165333,
"loss": 0.0008,
"step": 347
},
{
"epoch": 0.04923425175962933,
"grad_norm": 0.009428659453988075,
"learning_rate": 0.00014679298142605734,
"loss": 0.0007,
"step": 348
},
{
"epoch": 0.049375729494570794,
"grad_norm": 0.018124036490917206,
"learning_rate": 0.00014651510780774583,
"loss": 0.0012,
"step": 349
},
{
"epoch": 0.04951720722951226,
"grad_norm": 0.015100178308784962,
"learning_rate": 0.00014623677510409918,
"loss": 0.0012,
"step": 350
},
{
"epoch": 0.04965868496445372,
"grad_norm": 0.011049584485590458,
"learning_rate": 0.00014595798606214882,
"loss": 0.0009,
"step": 351
},
{
"epoch": 0.04980016269939518,
"grad_norm": 0.018350180238485336,
"learning_rate": 0.00014567874343342997,
"loss": 0.0012,
"step": 352
},
{
"epoch": 0.049941640434336645,
"grad_norm": 0.005243629217147827,
"learning_rate": 0.00014539904997395468,
"loss": 0.0005,
"step": 353
},
{
"epoch": 0.05008311816927811,
"grad_norm": 0.009776602499186993,
"learning_rate": 0.00014511890844418453,
"loss": 0.0008,
"step": 354
},
{
"epoch": 0.05022459590421957,
"grad_norm": 0.07710767537355423,
"learning_rate": 0.00014483832160900326,
"loss": 0.0023,
"step": 355
},
{
"epoch": 0.050366073639161034,
"grad_norm": 0.010807436890900135,
"learning_rate": 0.00014455729223768966,
"loss": 0.0008,
"step": 356
},
{
"epoch": 0.050507551374102504,
"grad_norm": 0.010125311091542244,
"learning_rate": 0.0001442758231038902,
"loss": 0.0007,
"step": 357
},
{
"epoch": 0.050649029109043966,
"grad_norm": 0.004309076350182295,
"learning_rate": 0.00014399391698559152,
"loss": 0.0004,
"step": 358
},
{
"epoch": 0.05079050684398543,
"grad_norm": 0.00469523249194026,
"learning_rate": 0.0001437115766650933,
"loss": 0.0003,
"step": 359
},
{
"epoch": 0.05093198457892689,
"grad_norm": 0.01330766174942255,
"learning_rate": 0.00014342880492898048,
"loss": 0.0006,
"step": 360
},
{
"epoch": 0.051073462313868355,
"grad_norm": 0.007151063531637192,
"learning_rate": 0.0001431456045680959,
"loss": 0.0004,
"step": 361
},
{
"epoch": 0.05121494004880982,
"grad_norm": 0.002502172952517867,
"learning_rate": 0.00014286197837751286,
"loss": 0.0002,
"step": 362
},
{
"epoch": 0.05135641778375128,
"grad_norm": 0.0047018746845424175,
"learning_rate": 0.00014257792915650728,
"loss": 0.0003,
"step": 363
},
{
"epoch": 0.05149789551869274,
"grad_norm": 0.002204613294452429,
"learning_rate": 0.00014229345970853032,
"loss": 0.0002,
"step": 364
},
{
"epoch": 0.051639373253634206,
"grad_norm": 0.01051216758787632,
"learning_rate": 0.00014200857284118066,
"loss": 0.0005,
"step": 365
},
{
"epoch": 0.051780850988575676,
"grad_norm": 0.003874387824907899,
"learning_rate": 0.00014172327136617656,
"loss": 0.0003,
"step": 366
},
{
"epoch": 0.05192232872351714,
"grad_norm": 0.0045910426415503025,
"learning_rate": 0.00014143755809932845,
"loss": 0.0003,
"step": 367
},
{
"epoch": 0.0520638064584586,
"grad_norm": 0.593174159526825,
"learning_rate": 0.00014115143586051088,
"loss": 0.0112,
"step": 368
},
{
"epoch": 0.052205284193400064,
"grad_norm": 0.010553068481385708,
"learning_rate": 0.00014086490747363493,
"loss": 0.0003,
"step": 369
},
{
"epoch": 0.05234676192834153,
"grad_norm": 0.10641700774431229,
"learning_rate": 0.00014057797576662,
"loss": 0.001,
"step": 370
},
{
"epoch": 0.05248823966328299,
"grad_norm": 0.0037969814147800207,
"learning_rate": 0.00014029064357136628,
"loss": 0.0002,
"step": 371
},
{
"epoch": 0.05262971739822445,
"grad_norm": 0.0030788243748247623,
"learning_rate": 0.00014000291372372647,
"loss": 0.0002,
"step": 372
},
{
"epoch": 0.052771195133165916,
"grad_norm": 0.0028270557522773743,
"learning_rate": 0.00013971478906347806,
"loss": 0.0002,
"step": 373
},
{
"epoch": 0.05291267286810738,
"grad_norm": 0.022989150136709213,
"learning_rate": 0.00013942627243429512,
"loss": 0.0006,
"step": 374
},
{
"epoch": 0.05305415060304885,
"grad_norm": 0.008212580345571041,
"learning_rate": 0.00013913736668372026,
"loss": 0.0004,
"step": 375
},
{
"epoch": 0.05319562833799031,
"grad_norm": 0.0014105787267908454,
"learning_rate": 0.00013884807466313663,
"loss": 0.0001,
"step": 376
},
{
"epoch": 0.053337106072931774,
"grad_norm": 0.0034487394150346518,
"learning_rate": 0.00013855839922773968,
"loss": 0.0002,
"step": 377
},
{
"epoch": 0.05347858380787324,
"grad_norm": 0.0014626358170062304,
"learning_rate": 0.000138268343236509,
"loss": 0.0001,
"step": 378
},
{
"epoch": 0.0536200615428147,
"grad_norm": 0.001634497893974185,
"learning_rate": 0.00013797790955218014,
"loss": 0.0001,
"step": 379
},
{
"epoch": 0.05376153927775616,
"grad_norm": 0.3064191937446594,
"learning_rate": 0.00013768710104121627,
"loss": 0.0022,
"step": 380
},
{
"epoch": 0.053903017012697625,
"grad_norm": 0.0029049685690551996,
"learning_rate": 0.00013739592057378003,
"loss": 0.0002,
"step": 381
},
{
"epoch": 0.05404449474763909,
"grad_norm": 0.005087097175419331,
"learning_rate": 0.0001371043710237051,
"loss": 0.0003,
"step": 382
},
{
"epoch": 0.05418597248258055,
"grad_norm": 0.008555252104997635,
"learning_rate": 0.00013681245526846783,
"loss": 0.0004,
"step": 383
},
{
"epoch": 0.05432745021752202,
"grad_norm": 0.0014736767625436187,
"learning_rate": 0.0001365201761891588,
"loss": 0.0001,
"step": 384
},
{
"epoch": 0.05446892795246348,
"grad_norm": 0.0015276784542948008,
"learning_rate": 0.00013622753667045457,
"loss": 0.0001,
"step": 385
},
{
"epoch": 0.054610405687404946,
"grad_norm": 0.001212657312862575,
"learning_rate": 0.00013593453960058908,
"loss": 0.0001,
"step": 386
},
{
"epoch": 0.05475188342234641,
"grad_norm": 0.0023078841622918844,
"learning_rate": 0.00013564118787132506,
"loss": 0.0001,
"step": 387
},
{
"epoch": 0.05489336115728787,
"grad_norm": 0.002347096335142851,
"learning_rate": 0.00013534748437792573,
"loss": 0.0002,
"step": 388
},
{
"epoch": 0.055034838892229335,
"grad_norm": 0.004173379857093096,
"learning_rate": 0.0001350534320191259,
"loss": 0.0001,
"step": 389
},
{
"epoch": 0.0551763166271708,
"grad_norm": 0.000805948453489691,
"learning_rate": 0.0001347590336971037,
"loss": 0.0001,
"step": 390
},
{
"epoch": 0.05531779436211226,
"grad_norm": 0.00531614338979125,
"learning_rate": 0.0001344642923174517,
"loss": 0.0001,
"step": 391
},
{
"epoch": 0.05545927209705372,
"grad_norm": 0.009082508273422718,
"learning_rate": 0.00013416921078914835,
"loss": 0.0003,
"step": 392
},
{
"epoch": 0.05560074983199519,
"grad_norm": 0.022566402330994606,
"learning_rate": 0.00013387379202452917,
"loss": 0.0003,
"step": 393
},
{
"epoch": 0.055742227566936656,
"grad_norm": 0.004918436054140329,
"learning_rate": 0.00013357803893925807,
"loss": 0.0001,
"step": 394
},
{
"epoch": 0.05588370530187812,
"grad_norm": 0.0007803619955666363,
"learning_rate": 0.00013328195445229868,
"loss": 0.0001,
"step": 395
},
{
"epoch": 0.05602518303681958,
"grad_norm": 0.0024644150398671627,
"learning_rate": 0.00013298554148588528,
"loss": 0.0001,
"step": 396
},
{
"epoch": 0.056166660771761044,
"grad_norm": 0.008953128010034561,
"learning_rate": 0.00013268880296549425,
"loss": 0.0006,
"step": 397
},
{
"epoch": 0.05630813850670251,
"grad_norm": 0.0007975678890943527,
"learning_rate": 0.00013239174181981495,
"loss": 0.0001,
"step": 398
},
{
"epoch": 0.05644961624164397,
"grad_norm": 0.0007346520433202386,
"learning_rate": 0.00013209436098072095,
"loss": 0.0001,
"step": 399
},
{
"epoch": 0.05659109397658543,
"grad_norm": 0.025135928764939308,
"learning_rate": 0.00013179666338324108,
"loss": 0.0002,
"step": 400
},
{
"epoch": 0.056732571711526895,
"grad_norm": 0.0011148619232699275,
"learning_rate": 0.0001314986519655305,
"loss": 0.0001,
"step": 401
},
{
"epoch": 0.056874049446468365,
"grad_norm": 0.0004933550371788442,
"learning_rate": 0.0001312003296688415,
"loss": 0.0,
"step": 402
},
{
"epoch": 0.05701552718140983,
"grad_norm": 0.000759758404456079,
"learning_rate": 0.00013090169943749476,
"loss": 0.0001,
"step": 403
},
{
"epoch": 0.05715700491635129,
"grad_norm": 0.0008426797576248646,
"learning_rate": 0.0001306027642188501,
"loss": 0.0001,
"step": 404
},
{
"epoch": 0.057298482651292754,
"grad_norm": 0.0015815444057807326,
"learning_rate": 0.00013030352696327742,
"loss": 0.0001,
"step": 405
},
{
"epoch": 0.057439960386234216,
"grad_norm": 0.00043021078454330564,
"learning_rate": 0.00013000399062412763,
"loss": 0.0,
"step": 406
},
{
"epoch": 0.05758143812117568,
"grad_norm": 0.0008661262108944356,
"learning_rate": 0.0001297041581577035,
"loss": 0.0001,
"step": 407
},
{
"epoch": 0.05772291585611714,
"grad_norm": 0.004798825830221176,
"learning_rate": 0.0001294040325232304,
"loss": 0.0001,
"step": 408
},
{
"epoch": 0.057864393591058605,
"grad_norm": 0.0007471769349649549,
"learning_rate": 0.00012910361668282719,
"loss": 0.0001,
"step": 409
},
{
"epoch": 0.05800587132600007,
"grad_norm": 0.012662540189921856,
"learning_rate": 0.00012880291360147693,
"loss": 0.0001,
"step": 410
},
{
"epoch": 0.05814734906094154,
"grad_norm": 0.0005973036750219762,
"learning_rate": 0.0001285019262469976,
"loss": 0.0,
"step": 411
},
{
"epoch": 0.058288826795883,
"grad_norm": 0.0005400076624937356,
"learning_rate": 0.00012820065759001293,
"loss": 0.0,
"step": 412
},
{
"epoch": 0.05843030453082446,
"grad_norm": 0.0010891173733398318,
"learning_rate": 0.00012789911060392294,
"loss": 0.0001,
"step": 413
},
{
"epoch": 0.058571782265765926,
"grad_norm": 0.0004290946526452899,
"learning_rate": 0.0001275972882648746,
"loss": 0.0,
"step": 414
},
{
"epoch": 0.05871326000070739,
"grad_norm": 0.0004823437484446913,
"learning_rate": 0.00012729519355173254,
"loss": 0.0,
"step": 415
},
{
"epoch": 0.05885473773564885,
"grad_norm": 0.005344636272639036,
"learning_rate": 0.00012699282944604967,
"loss": 0.0001,
"step": 416
},
{
"epoch": 0.058996215470590314,
"grad_norm": 3.188809394836426,
"learning_rate": 0.00012669019893203759,
"loss": 1.2313,
"step": 417
},
{
"epoch": 0.05913769320553178,
"grad_norm": 0.0019267729949206114,
"learning_rate": 0.0001263873049965373,
"loss": 0.0001,
"step": 418
},
{
"epoch": 0.05927917094047324,
"grad_norm": 0.0005391717422753572,
"learning_rate": 0.00012608415062898972,
"loss": 0.0,
"step": 419
},
{
"epoch": 0.05942064867541471,
"grad_norm": 0.005409176927059889,
"learning_rate": 0.000125780738821406,
"loss": 0.0003,
"step": 420
},
{
"epoch": 0.05956212641035617,
"grad_norm": 0.013438250869512558,
"learning_rate": 0.00012547707256833823,
"loss": 0.0006,
"step": 421
},
{
"epoch": 0.059703604145297635,
"grad_norm": 0.060834258794784546,
"learning_rate": 0.00012517315486684972,
"loss": 0.0019,
"step": 422
},
{
"epoch": 0.0598450818802391,
"grad_norm": 0.30073702335357666,
"learning_rate": 0.0001248689887164855,
"loss": 0.0043,
"step": 423
},
{
"epoch": 0.05998655961518056,
"grad_norm": 0.5793721675872803,
"learning_rate": 0.00012456457711924266,
"loss": 0.023,
"step": 424
},
{
"epoch": 0.060128037350122024,
"grad_norm": 0.01727374456822872,
"learning_rate": 0.00012425992307954075,
"loss": 0.0012,
"step": 425
},
{
"epoch": 0.06026951508506349,
"grad_norm": 0.039440348744392395,
"learning_rate": 0.0001239550296041922,
"loss": 0.002,
"step": 426
},
{
"epoch": 0.06041099282000495,
"grad_norm": 0.01554853469133377,
"learning_rate": 0.00012364989970237248,
"loss": 0.0007,
"step": 427
},
{
"epoch": 0.06055247055494641,
"grad_norm": 0.020410068333148956,
"learning_rate": 0.00012334453638559057,
"loss": 0.0013,
"step": 428
},
{
"epoch": 0.06069394828988788,
"grad_norm": 3.5123672485351562,
"learning_rate": 0.00012303894266765908,
"loss": 0.159,
"step": 429
},
{
"epoch": 0.060835426024829345,
"grad_norm": 0.010013503022491932,
"learning_rate": 0.00012273312156466464,
"loss": 0.0008,
"step": 430
},
{
"epoch": 0.06097690375977081,
"grad_norm": 0.060755547136068344,
"learning_rate": 0.00012242707609493814,
"loss": 0.0035,
"step": 431
},
{
"epoch": 0.06111838149471227,
"grad_norm": 0.005364975892007351,
"learning_rate": 0.00012212080927902474,
"loss": 0.0004,
"step": 432
},
{
"epoch": 0.06125985922965373,
"grad_norm": 0.014719590544700623,
"learning_rate": 0.00012181432413965428,
"loss": 0.0007,
"step": 433
},
{
"epoch": 0.061401336964595196,
"grad_norm": 0.016079336404800415,
"learning_rate": 0.00012150762370171136,
"loss": 0.0008,
"step": 434
},
{
"epoch": 0.06154281469953666,
"grad_norm": 0.03487243875861168,
"learning_rate": 0.00012120071099220549,
"loss": 0.0018,
"step": 435
},
{
"epoch": 0.06168429243447812,
"grad_norm": 0.03025330975651741,
"learning_rate": 0.00012089358904024117,
"loss": 0.0015,
"step": 436
},
{
"epoch": 0.061825770169419585,
"grad_norm": 0.02542041428387165,
"learning_rate": 0.00012058626087698814,
"loss": 0.0008,
"step": 437
},
{
"epoch": 0.061967247904361054,
"grad_norm": 0.011038844473659992,
"learning_rate": 0.00012027872953565125,
"loss": 0.0005,
"step": 438
},
{
"epoch": 0.06210872563930252,
"grad_norm": 0.020355192944407463,
"learning_rate": 0.00011997099805144069,
"loss": 0.0009,
"step": 439
},
{
"epoch": 0.06225020337424398,
"grad_norm": 0.008449536748230457,
"learning_rate": 0.000119663069461542,
"loss": 0.0005,
"step": 440
},
{
"epoch": 0.06239168110918544,
"grad_norm": 0.008639265783131123,
"learning_rate": 0.00011935494680508606,
"loss": 0.0004,
"step": 441
},
{
"epoch": 0.0625331588441269,
"grad_norm": 0.020484648644924164,
"learning_rate": 0.00011904663312311901,
"loss": 0.0012,
"step": 442
},
{
"epoch": 0.06267463657906837,
"grad_norm": 0.0050698923878371716,
"learning_rate": 0.00011873813145857249,
"loss": 0.0003,
"step": 443
},
{
"epoch": 0.06281611431400984,
"grad_norm": 0.004197041038423777,
"learning_rate": 0.00011842944485623335,
"loss": 0.0003,
"step": 444
},
{
"epoch": 0.0629575920489513,
"grad_norm": 0.029855944216251373,
"learning_rate": 0.00011812057636271374,
"loss": 0.0012,
"step": 445
},
{
"epoch": 0.06309906978389276,
"grad_norm": 0.002124859020113945,
"learning_rate": 0.000117811529026421,
"loss": 0.0001,
"step": 446
},
{
"epoch": 0.06324054751883422,
"grad_norm": 0.0030106690246611834,
"learning_rate": 0.00011750230589752762,
"loss": 0.0001,
"step": 447
},
{
"epoch": 0.06338202525377569,
"grad_norm": 0.003053938504308462,
"learning_rate": 0.00011719291002794096,
"loss": 0.0002,
"step": 448
},
{
"epoch": 0.06352350298871715,
"grad_norm": 0.0032461783848702908,
"learning_rate": 0.00011688334447127338,
"loss": 0.0002,
"step": 449
},
{
"epoch": 0.06366498072365862,
"grad_norm": 0.02754434570670128,
"learning_rate": 0.00011657361228281199,
"loss": 0.0013,
"step": 450
},
{
"epoch": 0.06380645845860007,
"grad_norm": 0.0038834463339298964,
"learning_rate": 0.00011626371651948838,
"loss": 0.0002,
"step": 451
},
{
"epoch": 0.06394793619354154,
"grad_norm": 0.0011302996426820755,
"learning_rate": 0.00011595366023984864,
"loss": 0.0001,
"step": 452
},
{
"epoch": 0.06408941392848301,
"grad_norm": 0.004102921113371849,
"learning_rate": 0.0001156434465040231,
"loss": 0.0001,
"step": 453
},
{
"epoch": 0.06423089166342447,
"grad_norm": 0.0013566426932811737,
"learning_rate": 0.00011533307837369607,
"loss": 0.0001,
"step": 454
},
{
"epoch": 0.06437236939836594,
"grad_norm": 0.0034373244270682335,
"learning_rate": 0.00011502255891207572,
"loss": 0.0001,
"step": 455
},
{
"epoch": 0.06451384713330739,
"grad_norm": 0.0010898098116740584,
"learning_rate": 0.00011471189118386375,
"loss": 0.0001,
"step": 456
},
{
"epoch": 0.06465532486824886,
"grad_norm": 0.0038209185004234314,
"learning_rate": 0.00011440107825522521,
"loss": 0.0002,
"step": 457
},
{
"epoch": 0.06479680260319032,
"grad_norm": 0.0053474209271371365,
"learning_rate": 0.00011409012319375827,
"loss": 0.0003,
"step": 458
},
{
"epoch": 0.06493828033813179,
"grad_norm": 0.0008347167167812586,
"learning_rate": 0.0001137790290684638,
"loss": 0.0001,
"step": 459
},
{
"epoch": 0.06507975807307324,
"grad_norm": 0.0008003226830624044,
"learning_rate": 0.00011346779894971527,
"loss": 0.0001,
"step": 460
},
{
"epoch": 0.06522123580801471,
"grad_norm": 0.0006106279324740171,
"learning_rate": 0.00011315643590922827,
"loss": 0.0,
"step": 461
},
{
"epoch": 0.06536271354295618,
"grad_norm": 0.0030305362306535244,
"learning_rate": 0.0001128449430200303,
"loss": 0.0002,
"step": 462
},
{
"epoch": 0.06550419127789764,
"grad_norm": 0.003015139140188694,
"learning_rate": 0.00011253332335643043,
"loss": 0.0002,
"step": 463
},
{
"epoch": 0.06564566901283911,
"grad_norm": 0.002168464008718729,
"learning_rate": 0.00011222157999398895,
"loss": 0.0001,
"step": 464
},
{
"epoch": 0.06578714674778056,
"grad_norm": 0.0038204342126846313,
"learning_rate": 0.00011190971600948699,
"loss": 0.0003,
"step": 465
},
{
"epoch": 0.06592862448272203,
"grad_norm": 0.0014754064613953233,
"learning_rate": 0.00011159773448089614,
"loss": 0.0001,
"step": 466
},
{
"epoch": 0.06607010221766349,
"grad_norm": 0.0011957096867263317,
"learning_rate": 0.00011128563848734816,
"loss": 0.0001,
"step": 467
},
{
"epoch": 0.06621157995260496,
"grad_norm": 0.002384569263085723,
"learning_rate": 0.00011097343110910452,
"loss": 0.0,
"step": 468
},
{
"epoch": 0.06635305768754642,
"grad_norm": 0.0006383927538990974,
"learning_rate": 0.000110661115427526,
"loss": 0.0,
"step": 469
},
{
"epoch": 0.06649453542248789,
"grad_norm": 0.0007738766144029796,
"learning_rate": 0.00011034869452504226,
"loss": 0.0001,
"step": 470
},
{
"epoch": 0.06663601315742936,
"grad_norm": 0.11662693321704865,
"learning_rate": 0.00011003617148512149,
"loss": 0.0009,
"step": 471
},
{
"epoch": 0.06677749089237081,
"grad_norm": 0.000526952208019793,
"learning_rate": 0.00010972354939223996,
"loss": 0.0,
"step": 472
},
{
"epoch": 0.06691896862731228,
"grad_norm": 0.001668064622208476,
"learning_rate": 0.00010941083133185146,
"loss": 0.0001,
"step": 473
},
{
"epoch": 0.06706044636225374,
"grad_norm": 0.003478636732324958,
"learning_rate": 0.00010909802039035701,
"loss": 0.0002,
"step": 474
},
{
"epoch": 0.0672019240971952,
"grad_norm": 0.0026004633400589228,
"learning_rate": 0.00010878511965507434,
"loss": 0.0001,
"step": 475
},
{
"epoch": 0.06734340183213666,
"grad_norm": 0.0019350722432136536,
"learning_rate": 0.00010847213221420736,
"loss": 0.0001,
"step": 476
},
{
"epoch": 0.06748487956707813,
"grad_norm": 0.0009413132793270051,
"learning_rate": 0.00010815906115681578,
"loss": 0.0001,
"step": 477
},
{
"epoch": 0.06762635730201959,
"grad_norm": 0.00028766566538251936,
"learning_rate": 0.0001078459095727845,
"loss": 0.0,
"step": 478
},
{
"epoch": 0.06776783503696106,
"grad_norm": 0.00023354949371423572,
"learning_rate": 0.00010753268055279329,
"loss": 0.0,
"step": 479
},
{
"epoch": 0.06790931277190253,
"grad_norm": 0.002164566656574607,
"learning_rate": 0.0001072193771882861,
"loss": 0.0001,
"step": 480
},
{
"epoch": 0.06805079050684398,
"grad_norm": 0.0007285153842531145,
"learning_rate": 0.00010690600257144061,
"loss": 0.0001,
"step": 481
},
{
"epoch": 0.06819226824178545,
"grad_norm": 0.0002895043871831149,
"learning_rate": 0.0001065925597951378,
"loss": 0.0,
"step": 482
},
{
"epoch": 0.06833374597672691,
"grad_norm": 0.001868347986601293,
"learning_rate": 0.00010627905195293135,
"loss": 0.0001,
"step": 483
},
{
"epoch": 0.06847522371166838,
"grad_norm": 0.0008055320358835161,
"learning_rate": 0.00010596548213901708,
"loss": 0.0001,
"step": 484
},
{
"epoch": 0.06861670144660983,
"grad_norm": 0.0038549823220819235,
"learning_rate": 0.00010565185344820247,
"loss": 0.0001,
"step": 485
},
{
"epoch": 0.0687581791815513,
"grad_norm": 0.003560754470527172,
"learning_rate": 0.00010533816897587606,
"loss": 0.0003,
"step": 486
},
{
"epoch": 0.06889965691649276,
"grad_norm": 0.0006623827503062785,
"learning_rate": 0.00010502443181797697,
"loss": 0.0001,
"step": 487
},
{
"epoch": 0.06904113465143423,
"grad_norm": 0.030813848599791527,
"learning_rate": 0.00010471064507096426,
"loss": 0.0003,
"step": 488
},
{
"epoch": 0.0691826123863757,
"grad_norm": 0.003694392740726471,
"learning_rate": 0.0001043968118317865,
"loss": 0.0002,
"step": 489
},
{
"epoch": 0.06932409012131716,
"grad_norm": 0.0010670871706679463,
"learning_rate": 0.00010408293519785101,
"loss": 0.0001,
"step": 490
},
{
"epoch": 0.06946556785625863,
"grad_norm": 0.0027516393456608057,
"learning_rate": 0.00010376901826699348,
"loss": 0.0001,
"step": 491
},
{
"epoch": 0.06960704559120008,
"grad_norm": 0.0029371054843068123,
"learning_rate": 0.00010345506413744726,
"loss": 0.0001,
"step": 492
},
{
"epoch": 0.06974852332614155,
"grad_norm": 0.0013108784332871437,
"learning_rate": 0.00010314107590781284,
"loss": 0.0001,
"step": 493
},
{
"epoch": 0.069890001061083,
"grad_norm": 0.0004981230013072491,
"learning_rate": 0.00010282705667702734,
"loss": 0.0,
"step": 494
},
{
"epoch": 0.07003147879602448,
"grad_norm": 0.00029754231218248606,
"learning_rate": 0.00010251300954433376,
"loss": 0.0,
"step": 495
},
{
"epoch": 0.07017295653096593,
"grad_norm": 0.0013778514694422483,
"learning_rate": 0.00010219893760925052,
"loss": 0.0,
"step": 496
},
{
"epoch": 0.0703144342659074,
"grad_norm": 0.0023718923330307007,
"learning_rate": 0.00010188484397154084,
"loss": 0.0001,
"step": 497
},
{
"epoch": 0.07045591200084887,
"grad_norm": 0.0007462603389285505,
"learning_rate": 0.00010157073173118208,
"loss": 0.0001,
"step": 498
},
{
"epoch": 0.07059738973579033,
"grad_norm": 0.00037132593570277095,
"learning_rate": 0.00010125660398833528,
"loss": 0.0,
"step": 499
},
{
"epoch": 0.0707388674707318,
"grad_norm": 0.000680119323078543,
"learning_rate": 0.00010094246384331442,
"loss": 0.0001,
"step": 500
},
{
"epoch": 0.07088034520567325,
"grad_norm": 0.0008801145595498383,
"learning_rate": 0.00010062831439655591,
"loss": 0.0001,
"step": 501
},
{
"epoch": 0.07102182294061472,
"grad_norm": 0.0019729931373149157,
"learning_rate": 0.00010031415874858797,
"loss": 0.0002,
"step": 502
},
{
"epoch": 0.07116330067555618,
"grad_norm": 0.0009173870203085244,
"learning_rate": 0.0001,
"loss": 0.0001,
"step": 503
},
{
"epoch": 0.07130477841049765,
"grad_norm": 0.0003767950984183699,
"learning_rate": 9.968584125141204e-05,
"loss": 0.0,
"step": 504
},
{
"epoch": 0.0714462561454391,
"grad_norm": 0.0010468692053109407,
"learning_rate": 9.937168560344412e-05,
"loss": 0.0001,
"step": 505
},
{
"epoch": 0.07158773388038057,
"grad_norm": 0.0019238515524193645,
"learning_rate": 9.90575361566856e-05,
"loss": 0.0001,
"step": 506
},
{
"epoch": 0.07172921161532204,
"grad_norm": 0.00045130791841074824,
"learning_rate": 9.874339601166473e-05,
"loss": 0.0,
"step": 507
},
{
"epoch": 0.0718706893502635,
"grad_norm": 0.0017480598762631416,
"learning_rate": 9.842926826881796e-05,
"loss": 0.0001,
"step": 508
},
{
"epoch": 0.07201216708520497,
"grad_norm": 0.0005805511609651148,
"learning_rate": 9.81151560284592e-05,
"loss": 0.0,
"step": 509
},
{
"epoch": 0.07215364482014643,
"grad_norm": 0.00020824585226364434,
"learning_rate": 9.78010623907495e-05,
"loss": 0.0,
"step": 510
},
{
"epoch": 0.0722951225550879,
"grad_norm": 0.000254087004577741,
"learning_rate": 9.748699045566626e-05,
"loss": 0.0,
"step": 511
},
{
"epoch": 0.07243660029002935,
"grad_norm": 0.0006141222547739744,
"learning_rate": 9.717294332297268e-05,
"loss": 0.0,
"step": 512
},
{
"epoch": 0.07257807802497082,
"grad_norm": 0.0007048989646136761,
"learning_rate": 9.685892409218717e-05,
"loss": 0.0001,
"step": 513
},
{
"epoch": 0.07271955575991228,
"grad_norm": 0.00048093812074512243,
"learning_rate": 9.654493586255278e-05,
"loss": 0.0,
"step": 514
},
{
"epoch": 0.07286103349485375,
"grad_norm": 0.0009035237017087638,
"learning_rate": 9.623098173300654e-05,
"loss": 0.0001,
"step": 515
},
{
"epoch": 0.07300251122979522,
"grad_norm": 0.0005546315223909914,
"learning_rate": 9.591706480214901e-05,
"loss": 0.0,
"step": 516
},
{
"epoch": 0.07314398896473667,
"grad_norm": 0.017050910741090775,
"learning_rate": 9.560318816821353e-05,
"loss": 0.0005,
"step": 517
},
{
"epoch": 0.07328546669967814,
"grad_norm": 0.015889961272478104,
"learning_rate": 9.528935492903575e-05,
"loss": 0.0002,
"step": 518
},
{
"epoch": 0.0734269444346196,
"grad_norm": 0.0003646703262347728,
"learning_rate": 9.497556818202306e-05,
"loss": 0.0,
"step": 519
},
{
"epoch": 0.07356842216956107,
"grad_norm": 0.0006750635802745819,
"learning_rate": 9.466183102412395e-05,
"loss": 0.0,
"step": 520
},
{
"epoch": 0.07370989990450252,
"grad_norm": 0.0002909269533120096,
"learning_rate": 9.434814655179755e-05,
"loss": 0.0,
"step": 521
},
{
"epoch": 0.073851377639444,
"grad_norm": 0.0032838047482073307,
"learning_rate": 9.403451786098294e-05,
"loss": 0.0001,
"step": 522
},
{
"epoch": 0.07399285537438545,
"grad_norm": 0.0004030920681543648,
"learning_rate": 9.372094804706867e-05,
"loss": 0.0,
"step": 523
},
{
"epoch": 0.07413433310932692,
"grad_norm": 0.003876444650813937,
"learning_rate": 9.340744020486222e-05,
"loss": 0.0001,
"step": 524
},
{
"epoch": 0.07427581084426839,
"grad_norm": 0.0003544877690728754,
"learning_rate": 9.309399742855942e-05,
"loss": 0.0,
"step": 525
},
{
"epoch": 0.07441728857920984,
"grad_norm": 0.0003599386545829475,
"learning_rate": 9.278062281171393e-05,
"loss": 0.0,
"step": 526
},
{
"epoch": 0.07455876631415131,
"grad_norm": 0.0009949669474735856,
"learning_rate": 9.246731944720675e-05,
"loss": 0.0001,
"step": 527
},
{
"epoch": 0.07470024404909277,
"grad_norm": 0.0006730380118824542,
"learning_rate": 9.215409042721552e-05,
"loss": 0.0,
"step": 528
},
{
"epoch": 0.07484172178403424,
"grad_norm": 0.0006648433045484126,
"learning_rate": 9.184093884318425e-05,
"loss": 0.0,
"step": 529
},
{
"epoch": 0.0749831995189757,
"grad_norm": 0.0010465396335348487,
"learning_rate": 9.152786778579267e-05,
"loss": 0.0001,
"step": 530
},
{
"epoch": 0.07512467725391717,
"grad_norm": 0.0002523198490962386,
"learning_rate": 9.121488034492569e-05,
"loss": 0.0,
"step": 531
},
{
"epoch": 0.07526615498885862,
"grad_norm": 0.0005815126351080835,
"learning_rate": 9.090197960964301e-05,
"loss": 0.0,
"step": 532
},
{
"epoch": 0.07540763272380009,
"grad_norm": 0.0027641430497169495,
"learning_rate": 9.058916866814858e-05,
"loss": 0.0001,
"step": 533
},
{
"epoch": 0.07554911045874156,
"grad_norm": 0.0037838981952518225,
"learning_rate": 9.027645060776006e-05,
"loss": 0.0002,
"step": 534
},
{
"epoch": 0.07569058819368302,
"grad_norm": 0.00022687749878969043,
"learning_rate": 8.99638285148785e-05,
"loss": 0.0,
"step": 535
},
{
"epoch": 0.07583206592862449,
"grad_norm": 0.0002952757349703461,
"learning_rate": 8.965130547495776e-05,
"loss": 0.0,
"step": 536
},
{
"epoch": 0.07597354366356594,
"grad_norm": 0.00020262067846488208,
"learning_rate": 8.933888457247402e-05,
"loss": 0.0,
"step": 537
},
{
"epoch": 0.07611502139850741,
"grad_norm": 0.00022801620070822537,
"learning_rate": 8.902656889089548e-05,
"loss": 0.0,
"step": 538
},
{
"epoch": 0.07625649913344887,
"grad_norm": 0.0029370381962507963,
"learning_rate": 8.871436151265184e-05,
"loss": 0.0001,
"step": 539
},
{
"epoch": 0.07639797686839034,
"grad_norm": 0.00019757526752073318,
"learning_rate": 8.840226551910387e-05,
"loss": 0.0,
"step": 540
},
{
"epoch": 0.0765394546033318,
"grad_norm": 0.00035157534875907004,
"learning_rate": 8.809028399051302e-05,
"loss": 0.0,
"step": 541
},
{
"epoch": 0.07668093233827326,
"grad_norm": 0.00033226568484678864,
"learning_rate": 8.777842000601105e-05,
"loss": 0.0,
"step": 542
},
{
"epoch": 0.07682241007321473,
"grad_norm": 0.0004599531239364296,
"learning_rate": 8.746667664356956e-05,
"loss": 0.0,
"step": 543
},
{
"epoch": 0.07696388780815619,
"grad_norm": 0.000987498089671135,
"learning_rate": 8.715505697996971e-05,
"loss": 0.0001,
"step": 544
},
{
"epoch": 0.07710536554309766,
"grad_norm": 0.0003310377069283277,
"learning_rate": 8.684356409077176e-05,
"loss": 0.0,
"step": 545
},
{
"epoch": 0.07724684327803912,
"grad_norm": 0.0006641054060310125,
"learning_rate": 8.653220105028474e-05,
"loss": 0.0001,
"step": 546
},
{
"epoch": 0.07738832101298058,
"grad_norm": 0.0002603815810289234,
"learning_rate": 8.62209709315362e-05,
"loss": 0.0,
"step": 547
},
{
"epoch": 0.07752979874792204,
"grad_norm": 0.004001593217253685,
"learning_rate": 8.590987680624174e-05,
"loss": 0.0001,
"step": 548
},
{
"epoch": 0.07767127648286351,
"grad_norm": 0.00022212194744497538,
"learning_rate": 8.559892174477479e-05,
"loss": 0.0,
"step": 549
},
{
"epoch": 0.07781275421780497,
"grad_norm": 0.07186271995306015,
"learning_rate": 8.528810881613626e-05,
"loss": 0.0022,
"step": 550
},
{
"epoch": 0.07795423195274644,
"grad_norm": 0.0003144246293231845,
"learning_rate": 8.497744108792429e-05,
"loss": 0.0,
"step": 551
},
{
"epoch": 0.0780957096876879,
"grad_norm": 0.005859481170773506,
"learning_rate": 8.466692162630392e-05,
"loss": 0.0003,
"step": 552
},
{
"epoch": 0.07823718742262936,
"grad_norm": 0.000740467687137425,
"learning_rate": 8.435655349597689e-05,
"loss": 0.0,
"step": 553
},
{
"epoch": 0.07837866515757083,
"grad_norm": 0.0005886468570679426,
"learning_rate": 8.404633976015134e-05,
"loss": 0.0,
"step": 554
},
{
"epoch": 0.07852014289251229,
"grad_norm": 0.0009009242057800293,
"learning_rate": 8.373628348051165e-05,
"loss": 0.0001,
"step": 555
},
{
"epoch": 0.07866162062745376,
"grad_norm": 0.0015645289095118642,
"learning_rate": 8.342638771718802e-05,
"loss": 0.0001,
"step": 556
},
{
"epoch": 0.07880309836239521,
"grad_norm": 0.0002594326506368816,
"learning_rate": 8.311665552872662e-05,
"loss": 0.0,
"step": 557
},
{
"epoch": 0.07894457609733668,
"grad_norm": 0.00021320579980965704,
"learning_rate": 8.280708997205904e-05,
"loss": 0.0,
"step": 558
},
{
"epoch": 0.07908605383227814,
"grad_norm": 0.00017182575538754463,
"learning_rate": 8.249769410247239e-05,
"loss": 0.0,
"step": 559
},
{
"epoch": 0.07922753156721961,
"grad_norm": 0.00021368375746533275,
"learning_rate": 8.218847097357898e-05,
"loss": 0.0,
"step": 560
},
{
"epoch": 0.07936900930216108,
"grad_norm": 0.0002579468709882349,
"learning_rate": 8.187942363728625e-05,
"loss": 0.0,
"step": 561
},
{
"epoch": 0.07951048703710253,
"grad_norm": 0.00014378316700458527,
"learning_rate": 8.157055514376666e-05,
"loss": 0.0,
"step": 562
},
{
"epoch": 0.079651964772044,
"grad_norm": 0.0006984220235608518,
"learning_rate": 8.126186854142752e-05,
"loss": 0.0,
"step": 563
},
{
"epoch": 0.07979344250698546,
"grad_norm": 0.00012296480417717248,
"learning_rate": 8.095336687688102e-05,
"loss": 0.0,
"step": 564
},
{
"epoch": 0.07993492024192693,
"grad_norm": 0.0001581730757607147,
"learning_rate": 8.064505319491398e-05,
"loss": 0.0,
"step": 565
},
{
"epoch": 0.08007639797686839,
"grad_norm": 0.0001595177163835615,
"learning_rate": 8.033693053845801e-05,
"loss": 0.0,
"step": 566
},
{
"epoch": 0.08021787571180986,
"grad_norm": 0.8796876668930054,
"learning_rate": 8.002900194855932e-05,
"loss": 0.4228,
"step": 567
},
{
"epoch": 0.08035935344675131,
"grad_norm": 0.0006363423308357596,
"learning_rate": 7.972127046434878e-05,
"loss": 0.0,
"step": 568
},
{
"epoch": 0.08050083118169278,
"grad_norm": 0.0015658728079870343,
"learning_rate": 7.941373912301189e-05,
"loss": 0.0001,
"step": 569
},
{
"epoch": 0.08064230891663425,
"grad_norm": 0.0007438261527568102,
"learning_rate": 7.910641095975886e-05,
"loss": 0.0001,
"step": 570
},
{
"epoch": 0.0807837866515757,
"grad_norm": 0.0014448019210249186,
"learning_rate": 7.879928900779456e-05,
"loss": 0.0,
"step": 571
},
{
"epoch": 0.08092526438651718,
"grad_norm": 0.00021074670075904578,
"learning_rate": 7.849237629828869e-05,
"loss": 0.0,
"step": 572
},
{
"epoch": 0.08106674212145863,
"grad_norm": 0.00042521391878835857,
"learning_rate": 7.818567586034577e-05,
"loss": 0.0,
"step": 573
},
{
"epoch": 0.0812082198564001,
"grad_norm": 0.0009682802483439445,
"learning_rate": 7.787919072097531e-05,
"loss": 0.0001,
"step": 574
},
{
"epoch": 0.08134969759134156,
"grad_norm": 0.0012320525711402297,
"learning_rate": 7.75729239050619e-05,
"loss": 0.0001,
"step": 575
},
{
"epoch": 0.08149117532628303,
"grad_norm": 0.00030932322260923684,
"learning_rate": 7.726687843533538e-05,
"loss": 0.0,
"step": 576
},
{
"epoch": 0.08163265306122448,
"grad_norm": 0.0066994656808674335,
"learning_rate": 7.696105733234098e-05,
"loss": 0.0004,
"step": 577
},
{
"epoch": 0.08177413079616595,
"grad_norm": 0.0007564704865217209,
"learning_rate": 7.66554636144095e-05,
"loss": 0.0001,
"step": 578
},
{
"epoch": 0.08191560853110742,
"grad_norm": 0.0006317924126051366,
"learning_rate": 7.635010029762756e-05,
"loss": 0.0001,
"step": 579
},
{
"epoch": 0.08205708626604888,
"grad_norm": 0.00094227195950225,
"learning_rate": 7.604497039580785e-05,
"loss": 0.0001,
"step": 580
},
{
"epoch": 0.08219856400099035,
"grad_norm": 0.0027162383776158094,
"learning_rate": 7.574007692045928e-05,
"loss": 0.0002,
"step": 581
},
{
"epoch": 0.0823400417359318,
"grad_norm": 0.003974114544689655,
"learning_rate": 7.543542288075739e-05,
"loss": 0.0001,
"step": 582
},
{
"epoch": 0.08248151947087327,
"grad_norm": 0.004181701224297285,
"learning_rate": 7.513101128351454e-05,
"loss": 0.0003,
"step": 583
},
{
"epoch": 0.08262299720581473,
"grad_norm": 0.0016781368758529425,
"learning_rate": 7.48268451331503e-05,
"loss": 0.0001,
"step": 584
},
{
"epoch": 0.0827644749407562,
"grad_norm": 0.0009342627017758787,
"learning_rate": 7.45229274316618e-05,
"loss": 0.0001,
"step": 585
},
{
"epoch": 0.08290595267569766,
"grad_norm": 0.05183006078004837,
"learning_rate": 7.421926117859403e-05,
"loss": 0.0026,
"step": 586
},
{
"epoch": 0.08304743041063913,
"grad_norm": 0.0014916006475687027,
"learning_rate": 7.391584937101033e-05,
"loss": 0.0001,
"step": 587
},
{
"epoch": 0.0831889081455806,
"grad_norm": 0.001441052882000804,
"learning_rate": 7.361269500346274e-05,
"loss": 0.0001,
"step": 588
},
{
"epoch": 0.08333038588052205,
"grad_norm": 0.003165285335853696,
"learning_rate": 7.330980106796246e-05,
"loss": 0.0002,
"step": 589
},
{
"epoch": 0.08347186361546352,
"grad_norm": 0.00124356709420681,
"learning_rate": 7.300717055395039e-05,
"loss": 0.0001,
"step": 590
},
{
"epoch": 0.08361334135040498,
"grad_norm": 0.0007717587286606431,
"learning_rate": 7.270480644826749e-05,
"loss": 0.0001,
"step": 591
},
{
"epoch": 0.08375481908534645,
"grad_norm": 0.0012677700724452734,
"learning_rate": 7.240271173512546e-05,
"loss": 0.0001,
"step": 592
},
{
"epoch": 0.0838962968202879,
"grad_norm": 0.008508299477398396,
"learning_rate": 7.210088939607708e-05,
"loss": 0.0005,
"step": 593
},
{
"epoch": 0.08403777455522937,
"grad_norm": 0.0027715216856449842,
"learning_rate": 7.179934240998706e-05,
"loss": 0.0002,
"step": 594
},
{
"epoch": 0.08417925229017083,
"grad_norm": 0.0006361504783853889,
"learning_rate": 7.149807375300239e-05,
"loss": 0.0001,
"step": 595
},
{
"epoch": 0.0843207300251123,
"grad_norm": 0.0007495923200622201,
"learning_rate": 7.119708639852312e-05,
"loss": 0.0001,
"step": 596
},
{
"epoch": 0.08446220776005377,
"grad_norm": 0.0008743112557567656,
"learning_rate": 7.089638331717284e-05,
"loss": 0.0001,
"step": 597
},
{
"epoch": 0.08460368549499522,
"grad_norm": 0.006306476425379515,
"learning_rate": 7.059596747676962e-05,
"loss": 0.0004,
"step": 598
},
{
"epoch": 0.08474516322993669,
"grad_norm": 0.2450258582830429,
"learning_rate": 7.029584184229653e-05,
"loss": 0.002,
"step": 599
},
{
"epoch": 0.08488664096487815,
"grad_norm": 0.0009690061560831964,
"learning_rate": 6.999600937587239e-05,
"loss": 0.0001,
"step": 600
},
{
"epoch": 0.08502811869981962,
"grad_norm": 0.004833643790334463,
"learning_rate": 6.969647303672262e-05,
"loss": 0.0003,
"step": 601
},
{
"epoch": 0.08516959643476107,
"grad_norm": 0.00044146020081825554,
"learning_rate": 6.939723578114993e-05,
"loss": 0.0,
"step": 602
},
{
"epoch": 0.08531107416970254,
"grad_norm": 0.000619858386926353,
"learning_rate": 6.909830056250527e-05,
"loss": 0.0001,
"step": 603
},
{
"epoch": 0.085452551904644,
"grad_norm": 0.0006960814935155213,
"learning_rate": 6.879967033115853e-05,
"loss": 0.0001,
"step": 604
},
{
"epoch": 0.08559402963958547,
"grad_norm": 0.019819116219878197,
"learning_rate": 6.850134803446954e-05,
"loss": 0.0012,
"step": 605
},
{
"epoch": 0.08573550737452694,
"grad_norm": 0.0005196069832891226,
"learning_rate": 6.820333661675893e-05,
"loss": 0.0,
"step": 606
},
{
"epoch": 0.0858769851094684,
"grad_norm": 0.0008297892636619508,
"learning_rate": 6.790563901927907e-05,
"loss": 0.0001,
"step": 607
},
{
"epoch": 0.08601846284440987,
"grad_norm": 23.886945724487305,
"learning_rate": 6.760825818018508e-05,
"loss": 0.3313,
"step": 608
},
{
"epoch": 0.08615994057935132,
"grad_norm": 0.001881247735582292,
"learning_rate": 6.731119703450577e-05,
"loss": 0.0001,
"step": 609
},
{
"epoch": 0.08630141831429279,
"grad_norm": 0.0037710980977863073,
"learning_rate": 6.701445851411472e-05,
"loss": 0.0002,
"step": 610
},
{
"epoch": 0.08644289604923425,
"grad_norm": 0.001079973648302257,
"learning_rate": 6.671804554770135e-05,
"loss": 0.0001,
"step": 611
},
{
"epoch": 0.08658437378417572,
"grad_norm": 0.002796902321279049,
"learning_rate": 6.642196106074194e-05,
"loss": 0.0001,
"step": 612
},
{
"epoch": 0.08672585151911717,
"grad_norm": 0.023120107129216194,
"learning_rate": 6.612620797547087e-05,
"loss": 0.0005,
"step": 613
},
{
"epoch": 0.08686732925405864,
"grad_norm": 0.04147057607769966,
"learning_rate": 6.583078921085167e-05,
"loss": 0.0011,
"step": 614
},
{
"epoch": 0.08700880698900011,
"grad_norm": 0.033874575048685074,
"learning_rate": 6.55357076825483e-05,
"loss": 0.001,
"step": 615
},
{
"epoch": 0.08715028472394157,
"grad_norm": 0.07600700855255127,
"learning_rate": 6.52409663028963e-05,
"loss": 0.0032,
"step": 616
},
{
"epoch": 0.08729176245888304,
"grad_norm": 0.019974149763584137,
"learning_rate": 6.494656798087412e-05,
"loss": 0.001,
"step": 617
},
{
"epoch": 0.0874332401938245,
"grad_norm": 0.015204976312816143,
"learning_rate": 6.465251562207431e-05,
"loss": 0.0006,
"step": 618
},
{
"epoch": 0.08757471792876596,
"grad_norm": 0.01299876905977726,
"learning_rate": 6.435881212867493e-05,
"loss": 0.0005,
"step": 619
},
{
"epoch": 0.08771619566370742,
"grad_norm": 0.02097712829709053,
"learning_rate": 6.406546039941094e-05,
"loss": 0.0007,
"step": 620
},
{
"epoch": 0.08785767339864889,
"grad_norm": 0.020041853189468384,
"learning_rate": 6.377246332954544e-05,
"loss": 0.0007,
"step": 621
},
{
"epoch": 0.08799915113359034,
"grad_norm": 0.021356869488954544,
"learning_rate": 6.347982381084123e-05,
"loss": 0.0007,
"step": 622
},
{
"epoch": 0.08814062886853181,
"grad_norm": 0.006782687269151211,
"learning_rate": 6.318754473153221e-05,
"loss": 0.0002,
"step": 623
},
{
"epoch": 0.08828210660347328,
"grad_norm": 0.008033067919313908,
"learning_rate": 6.289562897629492e-05,
"loss": 0.0002,
"step": 624
},
{
"epoch": 0.08842358433841474,
"grad_norm": 0.0033709348645061255,
"learning_rate": 6.260407942621998e-05,
"loss": 0.0001,
"step": 625
},
{
"epoch": 0.08856506207335621,
"grad_norm": 0.005288586486130953,
"learning_rate": 6.231289895878375e-05,
"loss": 0.0002,
"step": 626
},
{
"epoch": 0.08870653980829767,
"grad_norm": 0.0014732020208612084,
"learning_rate": 6.20220904478199e-05,
"loss": 0.0,
"step": 627
},
{
"epoch": 0.08884801754323914,
"grad_norm": 0.002344023436307907,
"learning_rate": 6.173165676349103e-05,
"loss": 0.0001,
"step": 628
},
{
"epoch": 0.08898949527818059,
"grad_norm": 0.0037802611477673054,
"learning_rate": 6.144160077226036e-05,
"loss": 0.0002,
"step": 629
},
{
"epoch": 0.08913097301312206,
"grad_norm": 0.0017438894137740135,
"learning_rate": 6.11519253368634e-05,
"loss": 0.0001,
"step": 630
},
{
"epoch": 0.08927245074806352,
"grad_norm": 0.006431651767343283,
"learning_rate": 6.086263331627976e-05,
"loss": 0.0001,
"step": 631
},
{
"epoch": 0.08941392848300499,
"grad_norm": 0.0024249274283647537,
"learning_rate": 6.05737275657049e-05,
"loss": 0.0001,
"step": 632
},
{
"epoch": 0.08955540621794646,
"grad_norm": 0.009574075229465961,
"learning_rate": 6.0285210936521955e-05,
"loss": 0.0006,
"step": 633
},
{
"epoch": 0.08969688395288791,
"grad_norm": 0.0018211673013865948,
"learning_rate": 5.999708627627354e-05,
"loss": 0.0001,
"step": 634
},
{
"epoch": 0.08983836168782938,
"grad_norm": 0.0010810673702508211,
"learning_rate": 5.9709356428633746e-05,
"loss": 0.0,
"step": 635
},
{
"epoch": 0.08997983942277084,
"grad_norm": 0.0009683024836704135,
"learning_rate": 5.9422024233380013e-05,
"loss": 0.0001,
"step": 636
},
{
"epoch": 0.09012131715771231,
"grad_norm": 0.0022739556152373552,
"learning_rate": 5.913509252636511e-05,
"loss": 0.0002,
"step": 637
},
{
"epoch": 0.09026279489265376,
"grad_norm": 0.0006144489743746817,
"learning_rate": 5.884856413948913e-05,
"loss": 0.0,
"step": 638
},
{
"epoch": 0.09040427262759523,
"grad_norm": 0.001888429163955152,
"learning_rate": 5.856244190067159e-05,
"loss": 0.0001,
"step": 639
},
{
"epoch": 0.09054575036253669,
"grad_norm": 0.0006371226045303047,
"learning_rate": 5.82767286338235e-05,
"loss": 0.0,
"step": 640
},
{
"epoch": 0.09068722809747816,
"grad_norm": 0.0011725660879164934,
"learning_rate": 5.799142715881938e-05,
"loss": 0.0001,
"step": 641
},
{
"epoch": 0.09082870583241963,
"grad_norm": 0.0020375438034534454,
"learning_rate": 5.770654029146969e-05,
"loss": 0.0001,
"step": 642
},
{
"epoch": 0.09097018356736108,
"grad_norm": 0.00023711427638772875,
"learning_rate": 5.7422070843492734e-05,
"loss": 0.0,
"step": 643
},
{
"epoch": 0.09111166130230255,
"grad_norm": 0.0010858512250706553,
"learning_rate": 5.713802162248718e-05,
"loss": 0.0001,
"step": 644
},
{
"epoch": 0.09125313903724401,
"grad_norm": 0.008106240071356297,
"learning_rate": 5.6854395431904094e-05,
"loss": 0.0003,
"step": 645
},
{
"epoch": 0.09139461677218548,
"grad_norm": 0.0011279614409431815,
"learning_rate": 5.657119507101954e-05,
"loss": 0.0001,
"step": 646
},
{
"epoch": 0.09153609450712694,
"grad_norm": 0.001151368604041636,
"learning_rate": 5.6288423334906735e-05,
"loss": 0.0,
"step": 647
},
{
"epoch": 0.0916775722420684,
"grad_norm": 0.002258772263303399,
"learning_rate": 5.6006083014408484e-05,
"loss": 0.0001,
"step": 648
},
{
"epoch": 0.09181904997700986,
"grad_norm": 0.0006376850651577115,
"learning_rate": 5.572417689610987e-05,
"loss": 0.0,
"step": 649
},
{
"epoch": 0.09196052771195133,
"grad_norm": 0.0002894207718782127,
"learning_rate": 5.544270776231038e-05,
"loss": 0.0,
"step": 650
},
{
"epoch": 0.0921020054468928,
"grad_norm": 0.0004874817677773535,
"learning_rate": 5.5161678390996796e-05,
"loss": 0.0,
"step": 651
},
{
"epoch": 0.09224348318183426,
"grad_norm": 0.0003829976776614785,
"learning_rate": 5.488109155581549e-05,
"loss": 0.0,
"step": 652
},
{
"epoch": 0.09238496091677573,
"grad_norm": 0.0019727759063243866,
"learning_rate": 5.4600950026045326e-05,
"loss": 0.0001,
"step": 653
},
{
"epoch": 0.09252643865171718,
"grad_norm": 0.00044044721289537847,
"learning_rate": 5.4321256566570036e-05,
"loss": 0.0,
"step": 654
},
{
"epoch": 0.09266791638665865,
"grad_norm": 0.00258098216727376,
"learning_rate": 5.404201393785122e-05,
"loss": 0.0002,
"step": 655
},
{
"epoch": 0.09280939412160011,
"grad_norm": 0.0012749796733260155,
"learning_rate": 5.3763224895900846e-05,
"loss": 0.0001,
"step": 656
},
{
"epoch": 0.09295087185654158,
"grad_norm": 0.0007071017753332853,
"learning_rate": 5.348489219225416e-05,
"loss": 0.0,
"step": 657
},
{
"epoch": 0.09309234959148303,
"grad_norm": 0.0017588926712051034,
"learning_rate": 5.320701857394268e-05,
"loss": 0.0001,
"step": 658
},
{
"epoch": 0.0932338273264245,
"grad_norm": 0.002789848716929555,
"learning_rate": 5.292960678346675e-05,
"loss": 0.0002,
"step": 659
},
{
"epoch": 0.09337530506136597,
"grad_norm": 0.00038435394526459277,
"learning_rate": 5.265265955876879e-05,
"loss": 0.0,
"step": 660
},
{
"epoch": 0.09351678279630743,
"grad_norm": 0.0007107415585778654,
"learning_rate": 5.237617963320608e-05,
"loss": 0.0,
"step": 661
},
{
"epoch": 0.0936582605312489,
"grad_norm": 0.0032040157821029425,
"learning_rate": 5.210016973552391e-05,
"loss": 0.0002,
"step": 662
},
{
"epoch": 0.09379973826619036,
"grad_norm": 0.0017821533838286996,
"learning_rate": 5.182463258982846e-05,
"loss": 0.0001,
"step": 663
},
{
"epoch": 0.09394121600113182,
"grad_norm": 0.0003448922943789512,
"learning_rate": 5.1549570915560206e-05,
"loss": 0.0,
"step": 664
},
{
"epoch": 0.09408269373607328,
"grad_norm": 0.0010452198330312967,
"learning_rate": 5.127498742746675e-05,
"loss": 0.0,
"step": 665
},
{
"epoch": 0.09422417147101475,
"grad_norm": 0.0009789979085326195,
"learning_rate": 5.100088483557634e-05,
"loss": 0.0,
"step": 666
},
{
"epoch": 0.0943656492059562,
"grad_norm": 0.0005419703666120768,
"learning_rate": 5.072726584517086e-05,
"loss": 0.0,
"step": 667
},
{
"epoch": 0.09450712694089768,
"grad_norm": 0.00028989804559387267,
"learning_rate": 5.045413315675924e-05,
"loss": 0.0,
"step": 668
},
{
"epoch": 0.09464860467583915,
"grad_norm": 0.0018694131867960095,
"learning_rate": 5.018148946605092e-05,
"loss": 0.0001,
"step": 669
},
{
"epoch": 0.0947900824107806,
"grad_norm": 0.0001970644952962175,
"learning_rate": 4.990933746392899e-05,
"loss": 0.0,
"step": 670
},
{
"epoch": 0.09493156014572207,
"grad_norm": 0.00024093178217299283,
"learning_rate": 4.9637679836423924e-05,
"loss": 0.0,
"step": 671
},
{
"epoch": 0.09507303788066353,
"grad_norm": 0.001606027246452868,
"learning_rate": 4.9366519264686725e-05,
"loss": 0.0,
"step": 672
},
{
"epoch": 0.095214515615605,
"grad_norm": 0.0034317609388381243,
"learning_rate": 4.909585842496287e-05,
"loss": 0.0002,
"step": 673
},
{
"epoch": 0.09535599335054645,
"grad_norm": 0.0005060399416834116,
"learning_rate": 4.8825699988565485e-05,
"loss": 0.0,
"step": 674
},
{
"epoch": 0.09549747108548792,
"grad_norm": 0.00040333837387152016,
"learning_rate": 4.8556046621849346e-05,
"loss": 0.0,
"step": 675
},
{
"epoch": 0.09563894882042938,
"grad_norm": 0.032013844698667526,
"learning_rate": 4.828690098618429e-05,
"loss": 0.0004,
"step": 676
},
{
"epoch": 0.09578042655537085,
"grad_norm": 0.0005927220336161554,
"learning_rate": 4.8018265737929044e-05,
"loss": 0.0,
"step": 677
},
{
"epoch": 0.09592190429031232,
"grad_norm": 0.0006017786217853427,
"learning_rate": 4.7750143528405126e-05,
"loss": 0.0,
"step": 678
},
{
"epoch": 0.09606338202525377,
"grad_norm": 0.002094410127028823,
"learning_rate": 4.748253700387042e-05,
"loss": 0.0001,
"step": 679
},
{
"epoch": 0.09620485976019524,
"grad_norm": 0.0008351746946573257,
"learning_rate": 4.721544880549337e-05,
"loss": 0.0,
"step": 680
},
{
"epoch": 0.0963463374951367,
"grad_norm": 0.07186631858348846,
"learning_rate": 4.694888156932658e-05,
"loss": 0.0023,
"step": 681
},
{
"epoch": 0.09648781523007817,
"grad_norm": 0.0034203226678073406,
"learning_rate": 4.668283792628114e-05,
"loss": 0.0001,
"step": 682
},
{
"epoch": 0.09662929296501963,
"grad_norm": 0.0005536339012905955,
"learning_rate": 4.6417320502100316e-05,
"loss": 0.0,
"step": 683
},
{
"epoch": 0.0967707706999611,
"grad_norm": 0.006527572870254517,
"learning_rate": 4.615233191733398e-05,
"loss": 0.0004,
"step": 684
},
{
"epoch": 0.09691224843490255,
"grad_norm": 0.00046978742466308177,
"learning_rate": 4.588787478731242e-05,
"loss": 0.0,
"step": 685
},
{
"epoch": 0.09705372616984402,
"grad_norm": 0.0001826570078264922,
"learning_rate": 4.5623951722120736e-05,
"loss": 0.0,
"step": 686
},
{
"epoch": 0.09719520390478549,
"grad_norm": 0.0008781368378549814,
"learning_rate": 4.5360565326573104e-05,
"loss": 0.0,
"step": 687
},
{
"epoch": 0.09733668163972695,
"grad_norm": 0.0002281236374983564,
"learning_rate": 4.5097718200186814e-05,
"loss": 0.0,
"step": 688
},
{
"epoch": 0.09747815937466842,
"grad_norm": 0.0003516919387038797,
"learning_rate": 4.483541293715698e-05,
"loss": 0.0,
"step": 689
},
{
"epoch": 0.09761963710960987,
"grad_norm": 0.002035178244113922,
"learning_rate": 4.457365212633058e-05,
"loss": 0.0001,
"step": 690
},
{
"epoch": 0.09776111484455134,
"grad_norm": 0.0006699900841340423,
"learning_rate": 4.431243835118124e-05,
"loss": 0.0,
"step": 691
},
{
"epoch": 0.0979025925794928,
"grad_norm": 0.0003746839938685298,
"learning_rate": 4.4051774189783315e-05,
"loss": 0.0,
"step": 692
},
{
"epoch": 0.09804407031443427,
"grad_norm": 0.0003929166414309293,
"learning_rate": 4.379166221478697e-05,
"loss": 0.0,
"step": 693
},
{
"epoch": 0.09818554804937572,
"grad_norm": 0.010091761127114296,
"learning_rate": 4.3532104993392306e-05,
"loss": 0.0003,
"step": 694
},
{
"epoch": 0.09832702578431719,
"grad_norm": 0.0012642780784517527,
"learning_rate": 4.327310508732437e-05,
"loss": 0.0001,
"step": 695
},
{
"epoch": 0.09846850351925866,
"grad_norm": 0.00036933113005943596,
"learning_rate": 4.301466505280762e-05,
"loss": 0.0,
"step": 696
},
{
"epoch": 0.09860998125420012,
"grad_norm": 0.0009099773596972227,
"learning_rate": 4.2756787440540936e-05,
"loss": 0.0001,
"step": 697
},
{
"epoch": 0.09875145898914159,
"grad_norm": 0.0003582508652471006,
"learning_rate": 4.249947479567218e-05,
"loss": 0.0,
"step": 698
},
{
"epoch": 0.09889293672408304,
"grad_norm": 0.005193689372390509,
"learning_rate": 4.224272965777326e-05,
"loss": 0.0002,
"step": 699
},
{
"epoch": 0.09903441445902451,
"grad_norm": 0.0015010442584753036,
"learning_rate": 4.1986554560815096e-05,
"loss": 0.0001,
"step": 700
},
{
"epoch": 0.09917589219396597,
"grad_norm": 2.6988134384155273,
"learning_rate": 4.173095203314241e-05,
"loss": 0.8706,
"step": 701
},
{
"epoch": 0.09931736992890744,
"grad_norm": 0.0003478722064755857,
"learning_rate": 4.1475924597449024e-05,
"loss": 0.0,
"step": 702
},
{
"epoch": 0.0994588476638489,
"grad_norm": 0.00035000426578335464,
"learning_rate": 4.12214747707527e-05,
"loss": 0.0,
"step": 703
},
{
"epoch": 0.09960032539879037,
"grad_norm": 0.0004221213166601956,
"learning_rate": 4.096760506437057e-05,
"loss": 0.0,
"step": 704
},
{
"epoch": 0.09974180313373184,
"grad_norm": 0.0002811127051245421,
"learning_rate": 4.071431798389408e-05,
"loss": 0.0,
"step": 705
},
{
"epoch": 0.09988328086867329,
"grad_norm": 0.0014768473338335752,
"learning_rate": 4.0461616029164526e-05,
"loss": 0.0001,
"step": 706
},
{
"epoch": 0.10002475860361476,
"grad_norm": 0.0011548775946721435,
"learning_rate": 4.020950169424815e-05,
"loss": 0.0001,
"step": 707
},
{
"epoch": 0.10016623633855622,
"grad_norm": 0.002059435937553644,
"learning_rate": 3.9957977467411615e-05,
"loss": 0.0002,
"step": 708
},
{
"epoch": 0.10030771407349769,
"grad_norm": 0.0005607010680250823,
"learning_rate": 3.9707045831097555e-05,
"loss": 0.0,
"step": 709
},
{
"epoch": 0.10044919180843914,
"grad_norm": 0.0003256005293224007,
"learning_rate": 3.945670926189987e-05,
"loss": 0.0,
"step": 710
},
{
"epoch": 0.10059066954338061,
"grad_norm": 0.009765557013452053,
"learning_rate": 3.920697023053949e-05,
"loss": 0.0002,
"step": 711
},
{
"epoch": 0.10073214727832207,
"grad_norm": 0.030024884268641472,
"learning_rate": 3.895783120183976e-05,
"loss": 0.0004,
"step": 712
},
{
"epoch": 0.10087362501326354,
"grad_norm": 0.0017325967783108354,
"learning_rate": 3.8709294634702376e-05,
"loss": 0.0001,
"step": 713
},
{
"epoch": 0.10101510274820501,
"grad_norm": 0.0008862835238687694,
"learning_rate": 3.846136298208285e-05,
"loss": 0.0001,
"step": 714
},
{
"epoch": 0.10115658048314646,
"grad_norm": 0.0033906784374266863,
"learning_rate": 3.821403869096658e-05,
"loss": 0.0003,
"step": 715
},
{
"epoch": 0.10129805821808793,
"grad_norm": 0.0006109073874540627,
"learning_rate": 3.796732420234443e-05,
"loss": 0.0,
"step": 716
},
{
"epoch": 0.10143953595302939,
"grad_norm": 0.008680574595928192,
"learning_rate": 3.7721221951188765e-05,
"loss": 0.0002,
"step": 717
},
{
"epoch": 0.10158101368797086,
"grad_norm": 0.0005801686784252524,
"learning_rate": 3.747573436642951e-05,
"loss": 0.0001,
"step": 718
},
{
"epoch": 0.10172249142291231,
"grad_norm": 0.00133225554600358,
"learning_rate": 3.7230863870929964e-05,
"loss": 0.0001,
"step": 719
},
{
"epoch": 0.10186396915785378,
"grad_norm": 0.0013388440711423755,
"learning_rate": 3.698661288146311e-05,
"loss": 0.0001,
"step": 720
},
{
"epoch": 0.10200544689279524,
"grad_norm": 0.002170740393921733,
"learning_rate": 3.674298380868756e-05,
"loss": 0.0002,
"step": 721
},
{
"epoch": 0.10214692462773671,
"grad_norm": 0.0010131692979484797,
"learning_rate": 3.649997905712396e-05,
"loss": 0.0001,
"step": 722
},
{
"epoch": 0.10228840236267818,
"grad_norm": 0.0006320245447568595,
"learning_rate": 3.6257601025131026e-05,
"loss": 0.0001,
"step": 723
},
{
"epoch": 0.10242988009761964,
"grad_norm": 0.0005717655294574797,
"learning_rate": 3.601585210488218e-05,
"loss": 0.0001,
"step": 724
},
{
"epoch": 0.1025713578325611,
"grad_norm": 0.0009512366959825158,
"learning_rate": 3.577473468234156e-05,
"loss": 0.0001,
"step": 725
},
{
"epoch": 0.10271283556750256,
"grad_norm": 0.0010945550166070461,
"learning_rate": 3.553425113724088e-05,
"loss": 0.0001,
"step": 726
},
{
"epoch": 0.10285431330244403,
"grad_norm": 0.0025489837862551212,
"learning_rate": 3.52944038430556e-05,
"loss": 0.0001,
"step": 727
},
{
"epoch": 0.10299579103738549,
"grad_norm": 0.016141386702656746,
"learning_rate": 3.5055195166981645e-05,
"loss": 0.0013,
"step": 728
},
{
"epoch": 0.10313726877232696,
"grad_norm": 0.0008998136036098003,
"learning_rate": 3.481662746991214e-05,
"loss": 0.0001,
"step": 729
},
{
"epoch": 0.10327874650726841,
"grad_norm": 0.007485687267035246,
"learning_rate": 3.4578703106413904e-05,
"loss": 0.0006,
"step": 730
},
{
"epoch": 0.10342022424220988,
"grad_norm": 0.0015185687225311995,
"learning_rate": 3.4341424424704375e-05,
"loss": 0.0001,
"step": 731
},
{
"epoch": 0.10356170197715135,
"grad_norm": 0.0037518057506531477,
"learning_rate": 3.4104793766628304e-05,
"loss": 0.0002,
"step": 732
},
{
"epoch": 0.10370317971209281,
"grad_norm": 0.002752428874373436,
"learning_rate": 3.386881346763483e-05,
"loss": 0.0001,
"step": 733
},
{
"epoch": 0.10384465744703428,
"grad_norm": 0.007001630496233702,
"learning_rate": 3.363348585675414e-05,
"loss": 0.0001,
"step": 734
},
{
"epoch": 0.10398613518197573,
"grad_norm": 0.005060289520770311,
"learning_rate": 3.339881325657484e-05,
"loss": 0.0003,
"step": 735
},
{
"epoch": 0.1041276129169172,
"grad_norm": 0.003094696905463934,
"learning_rate": 3.316479798322072e-05,
"loss": 0.0001,
"step": 736
},
{
"epoch": 0.10426909065185866,
"grad_norm": 0.0031883700285106897,
"learning_rate": 3.2931442346328004e-05,
"loss": 0.0002,
"step": 737
},
{
"epoch": 0.10441056838680013,
"grad_norm": 0.0017164596356451511,
"learning_rate": 3.269874864902269e-05,
"loss": 0.0001,
"step": 738
},
{
"epoch": 0.10455204612174158,
"grad_norm": 0.0011868190485984087,
"learning_rate": 3.246671918789755e-05,
"loss": 0.0001,
"step": 739
},
{
"epoch": 0.10469352385668305,
"grad_norm": 0.00042919107363559306,
"learning_rate": 3.223535625298979e-05,
"loss": 0.0,
"step": 740
},
{
"epoch": 0.10483500159162452,
"grad_norm": 0.0008362437947653234,
"learning_rate": 3.200466212775808e-05,
"loss": 0.0001,
"step": 741
},
{
"epoch": 0.10497647932656598,
"grad_norm": 0.0012623334769159555,
"learning_rate": 3.1774639089060363e-05,
"loss": 0.0001,
"step": 742
},
{
"epoch": 0.10511795706150745,
"grad_norm": 0.0010986161651089787,
"learning_rate": 3.154528940713113e-05,
"loss": 0.0001,
"step": 743
},
{
"epoch": 0.1052594347964489,
"grad_norm": 0.0006085491622798145,
"learning_rate": 3.1316615345559185e-05,
"loss": 0.0001,
"step": 744
},
{
"epoch": 0.10540091253139038,
"grad_norm": 0.0008765387465246022,
"learning_rate": 3.108861916126518e-05,
"loss": 0.0001,
"step": 745
},
{
"epoch": 0.10554239026633183,
"grad_norm": 0.0006421508733183146,
"learning_rate": 3.086130310447937e-05,
"loss": 0.0001,
"step": 746
},
{
"epoch": 0.1056838680012733,
"grad_norm": 0.0016310947248712182,
"learning_rate": 3.063466941871952e-05,
"loss": 0.0001,
"step": 747
},
{
"epoch": 0.10582534573621476,
"grad_norm": 0.0014764780644327402,
"learning_rate": 3.0408720340768572e-05,
"loss": 0.0001,
"step": 748
},
{
"epoch": 0.10596682347115623,
"grad_norm": 0.0006910095689818263,
"learning_rate": 3.018345810065275e-05,
"loss": 0.0001,
"step": 749
},
{
"epoch": 0.1061083012060977,
"grad_norm": 0.0015598449390381575,
"learning_rate": 2.9958884921619367e-05,
"loss": 0.0001,
"step": 750
},
{
"epoch": 0.10624977894103915,
"grad_norm": 0.0007486201939173043,
"learning_rate": 2.9735003020115092e-05,
"loss": 0.0001,
"step": 751
},
{
"epoch": 0.10639125667598062,
"grad_norm": 0.005332762375473976,
"learning_rate": 2.9511814605763855e-05,
"loss": 0.0005,
"step": 752
},
{
"epoch": 0.10653273441092208,
"grad_norm": 0.04735836386680603,
"learning_rate": 2.9289321881345254e-05,
"loss": 0.0004,
"step": 753
},
{
"epoch": 0.10667421214586355,
"grad_norm": 0.004297490697354078,
"learning_rate": 2.9067527042772636e-05,
"loss": 0.0004,
"step": 754
},
{
"epoch": 0.106815689880805,
"grad_norm": 0.0011461108224466443,
"learning_rate": 2.8846432279071467e-05,
"loss": 0.0001,
"step": 755
},
{
"epoch": 0.10695716761574647,
"grad_norm": 0.0008430193993262947,
"learning_rate": 2.8626039772357882e-05,
"loss": 0.0001,
"step": 756
},
{
"epoch": 0.10709864535068793,
"grad_norm": 0.0005710861296392977,
"learning_rate": 2.840635169781688e-05,
"loss": 0.0,
"step": 757
},
{
"epoch": 0.1072401230856294,
"grad_norm": 0.0006396773969754577,
"learning_rate": 2.8187370223681132e-05,
"loss": 0.0001,
"step": 758
},
{
"epoch": 0.10738160082057087,
"grad_norm": 0.0046224696561694145,
"learning_rate": 2.7969097511209308e-05,
"loss": 0.0001,
"step": 759
},
{
"epoch": 0.10752307855551232,
"grad_norm": 0.003434223821386695,
"learning_rate": 2.775153571466502e-05,
"loss": 0.0002,
"step": 760
},
{
"epoch": 0.1076645562904538,
"grad_norm": 0.005056839436292648,
"learning_rate": 2.753468698129533e-05,
"loss": 0.0002,
"step": 761
},
{
"epoch": 0.10780603402539525,
"grad_norm": 0.0009986249497160316,
"learning_rate": 2.7318553451309726e-05,
"loss": 0.0001,
"step": 762
},
{
"epoch": 0.10794751176033672,
"grad_norm": 0.008566567674279213,
"learning_rate": 2.7103137257858868e-05,
"loss": 0.0001,
"step": 763
},
{
"epoch": 0.10808898949527818,
"grad_norm": 0.001387615455314517,
"learning_rate": 2.688844052701359e-05,
"loss": 0.0001,
"step": 764
},
{
"epoch": 0.10823046723021965,
"grad_norm": 0.0008968693437054753,
"learning_rate": 2.6674465377744017e-05,
"loss": 0.0001,
"step": 765
},
{
"epoch": 0.1083719449651611,
"grad_norm": 0.0020030024461448193,
"learning_rate": 2.646121392189841e-05,
"loss": 0.0001,
"step": 766
},
{
"epoch": 0.10851342270010257,
"grad_norm": 0.0008027153671719134,
"learning_rate": 2.624868826418262e-05,
"loss": 0.0,
"step": 767
},
{
"epoch": 0.10865490043504404,
"grad_norm": 0.000605833251029253,
"learning_rate": 2.603689050213902e-05,
"loss": 0.0001,
"step": 768
},
{
"epoch": 0.1087963781699855,
"grad_norm": 0.002874531550332904,
"learning_rate": 2.582582272612609e-05,
"loss": 0.0002,
"step": 769
},
{
"epoch": 0.10893785590492697,
"grad_norm": 0.0006501192692667246,
"learning_rate": 2.561548701929749e-05,
"loss": 0.0001,
"step": 770
},
{
"epoch": 0.10907933363986842,
"grad_norm": 0.000950626446865499,
"learning_rate": 2.540588545758179e-05,
"loss": 0.0001,
"step": 771
},
{
"epoch": 0.10922081137480989,
"grad_norm": 0.0013473192229866982,
"learning_rate": 2.5197020109661772e-05,
"loss": 0.0001,
"step": 772
},
{
"epoch": 0.10936228910975135,
"grad_norm": 0.0013363496400415897,
"learning_rate": 2.4988893036954043e-05,
"loss": 0.0001,
"step": 773
},
{
"epoch": 0.10950376684469282,
"grad_norm": 0.0068521020002663136,
"learning_rate": 2.4781506293588873e-05,
"loss": 0.0004,
"step": 774
},
{
"epoch": 0.10964524457963427,
"grad_norm": 0.0016451867995783687,
"learning_rate": 2.4574861926389615e-05,
"loss": 0.0001,
"step": 775
},
{
"epoch": 0.10978672231457574,
"grad_norm": 0.00135321996640414,
"learning_rate": 2.436896197485282e-05,
"loss": 0.0001,
"step": 776
},
{
"epoch": 0.10992820004951721,
"grad_norm": 0.0008520256378687918,
"learning_rate": 2.4163808471127812e-05,
"loss": 0.0001,
"step": 777
},
{
"epoch": 0.11006967778445867,
"grad_norm": 0.0019374149851500988,
"learning_rate": 2.3959403439996907e-05,
"loss": 0.0002,
"step": 778
},
{
"epoch": 0.11021115551940014,
"grad_norm": 0.0019093346782028675,
"learning_rate": 2.37557488988552e-05,
"loss": 0.0002,
"step": 779
},
{
"epoch": 0.1103526332543416,
"grad_norm": 0.0007253644871525466,
"learning_rate": 2.3552846857690846e-05,
"loss": 0.0001,
"step": 780
},
{
"epoch": 0.11049411098928306,
"grad_norm": 0.0013740455033257604,
"learning_rate": 2.3350699319065026e-05,
"loss": 0.0001,
"step": 781
},
{
"epoch": 0.11063558872422452,
"grad_norm": 0.0012033848324790597,
"learning_rate": 2.3149308278092342e-05,
"loss": 0.0001,
"step": 782
},
{
"epoch": 0.11077706645916599,
"grad_norm": 0.001106973155401647,
"learning_rate": 2.2948675722421086e-05,
"loss": 0.0001,
"step": 783
},
{
"epoch": 0.11091854419410745,
"grad_norm": 0.00048042790149338543,
"learning_rate": 2.2748803632213557e-05,
"loss": 0.0,
"step": 784
},
{
"epoch": 0.11106002192904892,
"grad_norm": 0.001044884673319757,
"learning_rate": 2.254969398012663e-05,
"loss": 0.0001,
"step": 785
},
{
"epoch": 0.11120149966399039,
"grad_norm": 0.0007944092503748834,
"learning_rate": 2.235134873129213e-05,
"loss": 0.0001,
"step": 786
},
{
"epoch": 0.11134297739893184,
"grad_norm": 0.0007932492881081998,
"learning_rate": 2.2153769843297667e-05,
"loss": 0.0001,
"step": 787
},
{
"epoch": 0.11148445513387331,
"grad_norm": 0.001373193459585309,
"learning_rate": 2.195695926616702e-05,
"loss": 0.0001,
"step": 788
},
{
"epoch": 0.11162593286881477,
"grad_norm": 0.006605231203138828,
"learning_rate": 2.1760918942341192e-05,
"loss": 0.0006,
"step": 789
},
{
"epoch": 0.11176741060375624,
"grad_norm": 0.0010321062291041017,
"learning_rate": 2.1565650806658975e-05,
"loss": 0.0001,
"step": 790
},
{
"epoch": 0.11190888833869769,
"grad_norm": 0.20049892365932465,
"learning_rate": 2.137115678633811e-05,
"loss": 0.0012,
"step": 791
},
{
"epoch": 0.11205036607363916,
"grad_norm": 0.003096005879342556,
"learning_rate": 2.1177438800956007e-05,
"loss": 0.0002,
"step": 792
},
{
"epoch": 0.11219184380858062,
"grad_norm": 0.0008285019430331886,
"learning_rate": 2.098449876243096e-05,
"loss": 0.0001,
"step": 793
},
{
"epoch": 0.11233332154352209,
"grad_norm": 0.003056522458791733,
"learning_rate": 2.07923385750033e-05,
"loss": 0.0002,
"step": 794
},
{
"epoch": 0.11247479927846356,
"grad_norm": 0.0010699566919356585,
"learning_rate": 2.0600960135216462e-05,
"loss": 0.0001,
"step": 795
},
{
"epoch": 0.11261627701340501,
"grad_norm": 0.003565857419744134,
"learning_rate": 2.0410365331898416e-05,
"loss": 0.0002,
"step": 796
},
{
"epoch": 0.11275775474834648,
"grad_norm": 0.000508810393512249,
"learning_rate": 2.0220556046142893e-05,
"loss": 0.0,
"step": 797
},
{
"epoch": 0.11289923248328794,
"grad_norm": 0.00033257991890423,
"learning_rate": 2.0031534151290943e-05,
"loss": 0.0,
"step": 798
},
{
"epoch": 0.11304071021822941,
"grad_norm": 0.0007687319885008037,
"learning_rate": 1.9843301512912327e-05,
"loss": 0.0001,
"step": 799
},
{
"epoch": 0.11318218795317087,
"grad_norm": 0.004830488003790379,
"learning_rate": 1.965585998878724e-05,
"loss": 0.0004,
"step": 800
},
{
"epoch": 0.11332366568811233,
"grad_norm": 0.0005077541572973132,
"learning_rate": 1.946921142888781e-05,
"loss": 0.0,
"step": 801
},
{
"epoch": 0.11346514342305379,
"grad_norm": 0.004109581001102924,
"learning_rate": 1.928335767535997e-05,
"loss": 0.0003,
"step": 802
},
{
"epoch": 0.11360662115799526,
"grad_norm": 0.0029293650295585394,
"learning_rate": 1.9098300562505266e-05,
"loss": 0.0002,
"step": 803
},
{
"epoch": 0.11374809889293673,
"grad_norm": 0.0005266869557090104,
"learning_rate": 1.891404191676265e-05,
"loss": 0.0,
"step": 804
},
{
"epoch": 0.11388957662787819,
"grad_norm": 0.0005665577482432127,
"learning_rate": 1.8730583556690605e-05,
"loss": 0.0001,
"step": 805
},
{
"epoch": 0.11403105436281966,
"grad_norm": 0.0034227666910737753,
"learning_rate": 1.854792729294905e-05,
"loss": 0.0001,
"step": 806
},
{
"epoch": 0.11417253209776111,
"grad_norm": 0.000532831996679306,
"learning_rate": 1.8366074928281607e-05,
"loss": 0.0,
"step": 807
},
{
"epoch": 0.11431400983270258,
"grad_norm": 0.0009800855768844485,
"learning_rate": 1.818502825749764e-05,
"loss": 0.0001,
"step": 808
},
{
"epoch": 0.11445548756764404,
"grad_norm": 0.0037957008462399244,
"learning_rate": 1.8004789067454764e-05,
"loss": 0.0003,
"step": 809
},
{
"epoch": 0.11459696530258551,
"grad_norm": 0.0018789039459079504,
"learning_rate": 1.7825359137040988e-05,
"loss": 0.0002,
"step": 810
},
{
"epoch": 0.11473844303752696,
"grad_norm": 0.0015523998299613595,
"learning_rate": 1.7646740237157256e-05,
"loss": 0.0001,
"step": 811
},
{
"epoch": 0.11487992077246843,
"grad_norm": 0.000724561046808958,
"learning_rate": 1.7468934130700044e-05,
"loss": 0.0001,
"step": 812
},
{
"epoch": 0.1150213985074099,
"grad_norm": 0.003468384500592947,
"learning_rate": 1.7291942572543807e-05,
"loss": 0.0002,
"step": 813
},
{
"epoch": 0.11516287624235136,
"grad_norm": 0.0004411648551467806,
"learning_rate": 1.7115767309523812e-05,
"loss": 0.0,
"step": 814
},
{
"epoch": 0.11530435397729283,
"grad_norm": 0.003343729767948389,
"learning_rate": 1.6940410080418723e-05,
"loss": 0.0001,
"step": 815
},
{
"epoch": 0.11544583171223428,
"grad_norm": 1.5019727945327759,
"learning_rate": 1.6765872615933677e-05,
"loss": 0.4939,
"step": 816
},
{
"epoch": 0.11558730944717575,
"grad_norm": 0.0021441394928842783,
"learning_rate": 1.6592156638682886e-05,
"loss": 0.0002,
"step": 817
},
{
"epoch": 0.11572878718211721,
"grad_norm": 0.0018722140230238438,
"learning_rate": 1.6419263863172997e-05,
"loss": 0.0001,
"step": 818
},
{
"epoch": 0.11587026491705868,
"grad_norm": 0.000854533922392875,
"learning_rate": 1.6247195995785837e-05,
"loss": 0.0001,
"step": 819
},
{
"epoch": 0.11601174265200014,
"grad_norm": 0.0007223361171782017,
"learning_rate": 1.6075954734761845e-05,
"loss": 0.0001,
"step": 820
},
{
"epoch": 0.1161532203869416,
"grad_norm": 0.0026152019854635,
"learning_rate": 1.5905541770183096e-05,
"loss": 0.0002,
"step": 821
},
{
"epoch": 0.11629469812188307,
"grad_norm": 0.001397658372297883,
"learning_rate": 1.5735958783956794e-05,
"loss": 0.0001,
"step": 822
},
{
"epoch": 0.11643617585682453,
"grad_norm": 0.0011791229480877519,
"learning_rate": 1.5567207449798515e-05,
"loss": 0.0001,
"step": 823
},
{
"epoch": 0.116577653591766,
"grad_norm": 0.0016374592669308186,
"learning_rate": 1.539928943321579e-05,
"loss": 0.0001,
"step": 824
},
{
"epoch": 0.11671913132670746,
"grad_norm": 0.002598709659650922,
"learning_rate": 1.5232206391491699e-05,
"loss": 0.0002,
"step": 825
},
{
"epoch": 0.11686060906164893,
"grad_norm": 0.0026453689206391573,
"learning_rate": 1.5065959973668353e-05,
"loss": 0.0002,
"step": 826
},
{
"epoch": 0.11700208679659038,
"grad_norm": 0.0007108437712304294,
"learning_rate": 1.4900551820530828e-05,
"loss": 0.0001,
"step": 827
},
{
"epoch": 0.11714356453153185,
"grad_norm": 0.0016297996044158936,
"learning_rate": 1.4735983564590783e-05,
"loss": 0.0001,
"step": 828
},
{
"epoch": 0.11728504226647331,
"grad_norm": 0.0010631303302943707,
"learning_rate": 1.4572256830070497e-05,
"loss": 0.0001,
"step": 829
},
{
"epoch": 0.11742652000141478,
"grad_norm": 0.041488487273454666,
"learning_rate": 1.4409373232886702e-05,
"loss": 0.0006,
"step": 830
},
{
"epoch": 0.11756799773635625,
"grad_norm": 0.002247254131361842,
"learning_rate": 1.4247334380634792e-05,
"loss": 0.0001,
"step": 831
},
{
"epoch": 0.1177094754712977,
"grad_norm": 0.0015282740350812674,
"learning_rate": 1.4086141872572789e-05,
"loss": 0.0001,
"step": 832
},
{
"epoch": 0.11785095320623917,
"grad_norm": 0.0033260490745306015,
"learning_rate": 1.3925797299605647e-05,
"loss": 0.0002,
"step": 833
},
{
"epoch": 0.11799243094118063,
"grad_norm": 0.00047575324424542487,
"learning_rate": 1.3766302244269624e-05,
"loss": 0.0,
"step": 834
},
{
"epoch": 0.1181339086761221,
"grad_norm": 1.9940252304077148,
"learning_rate": 1.3607658280716473e-05,
"loss": 0.1164,
"step": 835
},
{
"epoch": 0.11827538641106355,
"grad_norm": 0.013329320587217808,
"learning_rate": 1.3449866974698122e-05,
"loss": 0.0007,
"step": 836
},
{
"epoch": 0.11841686414600502,
"grad_norm": 0.009485892951488495,
"learning_rate": 1.3292929883550998e-05,
"loss": 0.0007,
"step": 837
},
{
"epoch": 0.11855834188094648,
"grad_norm": 0.004114389885216951,
"learning_rate": 1.3136848556180892e-05,
"loss": 0.0004,
"step": 838
},
{
"epoch": 0.11869981961588795,
"grad_norm": 0.008051803335547447,
"learning_rate": 1.2981624533047432e-05,
"loss": 0.0002,
"step": 839
},
{
"epoch": 0.11884129735082942,
"grad_norm": 0.00846549030393362,
"learning_rate": 1.2827259346149122e-05,
"loss": 0.0005,
"step": 840
},
{
"epoch": 0.11898277508577088,
"grad_norm": 0.004868239630013704,
"learning_rate": 1.2673754519008008e-05,
"loss": 0.0004,
"step": 841
},
{
"epoch": 0.11912425282071235,
"grad_norm": 0.00196613441221416,
"learning_rate": 1.2521111566654731e-05,
"loss": 0.0001,
"step": 842
},
{
"epoch": 0.1192657305556538,
"grad_norm": 0.0007714144885540009,
"learning_rate": 1.2369331995613665e-05,
"loss": 0.0001,
"step": 843
},
{
"epoch": 0.11940720829059527,
"grad_norm": 0.0020139142870903015,
"learning_rate": 1.2218417303887842e-05,
"loss": 0.0002,
"step": 844
},
{
"epoch": 0.11954868602553673,
"grad_norm": 0.001310071093030274,
"learning_rate": 1.206836898094439e-05,
"loss": 0.0001,
"step": 845
},
{
"epoch": 0.1196901637604782,
"grad_norm": 0.00624494906514883,
"learning_rate": 1.191918850769964e-05,
"loss": 0.0002,
"step": 846
},
{
"epoch": 0.11983164149541965,
"grad_norm": 0.0040423693135380745,
"learning_rate": 1.1770877356504683e-05,
"loss": 0.0002,
"step": 847
},
{
"epoch": 0.11997311923036112,
"grad_norm": 0.01039808988571167,
"learning_rate": 1.1623436991130654e-05,
"loss": 0.0007,
"step": 848
},
{
"epoch": 0.12011459696530259,
"grad_norm": 0.005628874525427818,
"learning_rate": 1.1476868866754486e-05,
"loss": 0.0004,
"step": 849
},
{
"epoch": 0.12025607470024405,
"grad_norm": 0.011199036613106728,
"learning_rate": 1.1331174429944347e-05,
"loss": 0.0007,
"step": 850
},
{
"epoch": 0.12039755243518552,
"grad_norm": 0.00257562636397779,
"learning_rate": 1.1186355118645554e-05,
"loss": 0.0001,
"step": 851
},
{
"epoch": 0.12053903017012697,
"grad_norm": 0.001868070219643414,
"learning_rate": 1.1042412362166222e-05,
"loss": 0.0001,
"step": 852
},
{
"epoch": 0.12068050790506844,
"grad_norm": 0.003299275878816843,
"learning_rate": 1.0899347581163221e-05,
"loss": 0.0003,
"step": 853
},
{
"epoch": 0.1208219856400099,
"grad_norm": 0.0014683003537356853,
"learning_rate": 1.0757162187628222e-05,
"loss": 0.0001,
"step": 854
},
{
"epoch": 0.12096346337495137,
"grad_norm": 0.004328795243054628,
"learning_rate": 1.0615857584873623e-05,
"loss": 0.0004,
"step": 855
},
{
"epoch": 0.12110494110989282,
"grad_norm": 0.002547298092395067,
"learning_rate": 1.0475435167518843e-05,
"loss": 0.0002,
"step": 856
},
{
"epoch": 0.1212464188448343,
"grad_norm": 0.0016848307568579912,
"learning_rate": 1.0335896321476413e-05,
"loss": 0.0001,
"step": 857
},
{
"epoch": 0.12138789657977576,
"grad_norm": 0.004747582133859396,
"learning_rate": 1.0197242423938446e-05,
"loss": 0.0005,
"step": 858
},
{
"epoch": 0.12152937431471722,
"grad_norm": 0.001700869295746088,
"learning_rate": 1.0059474843362892e-05,
"loss": 0.0001,
"step": 859
},
{
"epoch": 0.12167085204965869,
"grad_norm": 0.0012192331487312913,
"learning_rate": 9.922594939460194e-06,
"loss": 0.0001,
"step": 860
},
{
"epoch": 0.12181232978460015,
"grad_norm": 0.003007133025676012,
"learning_rate": 9.786604063179728e-06,
"loss": 0.0002,
"step": 861
},
{
"epoch": 0.12195380751954162,
"grad_norm": 0.001699776970781386,
"learning_rate": 9.651503556696516e-06,
"loss": 0.0001,
"step": 862
},
{
"epoch": 0.12209528525448307,
"grad_norm": 0.0022404356859624386,
"learning_rate": 9.517294753398064e-06,
"loss": 0.0002,
"step": 863
},
{
"epoch": 0.12223676298942454,
"grad_norm": 0.003892921144142747,
"learning_rate": 9.383978977871021e-06,
"loss": 0.0002,
"step": 864
},
{
"epoch": 0.122378240724366,
"grad_norm": 0.0008568639750592411,
"learning_rate": 9.251557545888312e-06,
"loss": 0.0001,
"step": 865
},
{
"epoch": 0.12251971845930747,
"grad_norm": 0.001413036254234612,
"learning_rate": 9.120031764395987e-06,
"loss": 0.0001,
"step": 866
},
{
"epoch": 0.12266119619424894,
"grad_norm": 0.0006800137925893068,
"learning_rate": 8.989402931500434e-06,
"loss": 0.0001,
"step": 867
},
{
"epoch": 0.12280267392919039,
"grad_norm": 0.009284158237278461,
"learning_rate": 8.85967233645547e-06,
"loss": 0.0004,
"step": 868
},
{
"epoch": 0.12294415166413186,
"grad_norm": 0.002639732789248228,
"learning_rate": 8.730841259649725e-06,
"loss": 0.0002,
"step": 869
},
{
"epoch": 0.12308562939907332,
"grad_norm": 0.009310910478234291,
"learning_rate": 8.602910972593892e-06,
"loss": 0.0007,
"step": 870
},
{
"epoch": 0.12322710713401479,
"grad_norm": 0.001654073246754706,
"learning_rate": 8.475882737908248e-06,
"loss": 0.0001,
"step": 871
},
{
"epoch": 0.12336858486895624,
"grad_norm": 0.007454808335751295,
"learning_rate": 8.34975780931021e-06,
"loss": 0.0001,
"step": 872
},
{
"epoch": 0.12351006260389771,
"grad_norm": 0.002037209691479802,
"learning_rate": 8.224537431601886e-06,
"loss": 0.0002,
"step": 873
},
{
"epoch": 0.12365154033883917,
"grad_norm": 0.003405377734452486,
"learning_rate": 8.100222840657878e-06,
"loss": 0.0002,
"step": 874
},
{
"epoch": 0.12379301807378064,
"grad_norm": 0.002389519941061735,
"learning_rate": 7.976815263412963e-06,
"loss": 0.0002,
"step": 875
},
{
"epoch": 0.12393449580872211,
"grad_norm": 0.000885749002918601,
"learning_rate": 7.854315917850163e-06,
"loss": 0.0001,
"step": 876
},
{
"epoch": 0.12407597354366356,
"grad_norm": 0.0019240875262767076,
"learning_rate": 7.73272601298851e-06,
"loss": 0.0002,
"step": 877
},
{
"epoch": 0.12421745127860503,
"grad_norm": 0.001213736366480589,
"learning_rate": 7.612046748871327e-06,
"loss": 0.0001,
"step": 878
},
{
"epoch": 0.12435892901354649,
"grad_norm": 0.0018200764898210764,
"learning_rate": 7.492279316554207e-06,
"loss": 0.0002,
"step": 879
},
{
"epoch": 0.12450040674848796,
"grad_norm": 0.048911500722169876,
"learning_rate": 7.3734248980933395e-06,
"loss": 0.0009,
"step": 880
},
{
"epoch": 0.12464188448342942,
"grad_norm": 0.003094620769843459,
"learning_rate": 7.255484666533874e-06,
"loss": 0.0001,
"step": 881
},
{
"epoch": 0.12478336221837089,
"grad_norm": 0.02175629325211048,
"learning_rate": 7.138459785898266e-06,
"loss": 0.0013,
"step": 882
},
{
"epoch": 0.12492483995331234,
"grad_norm": 0.007283073849976063,
"learning_rate": 7.022351411174866e-06,
"loss": 0.0002,
"step": 883
},
{
"epoch": 0.1250663176882538,
"grad_norm": 0.0008444397244602442,
"learning_rate": 6.907160688306425e-06,
"loss": 0.0001,
"step": 884
},
{
"epoch": 0.12520779542319527,
"grad_norm": 0.0045507000759243965,
"learning_rate": 6.7928887541789055e-06,
"loss": 0.0003,
"step": 885
},
{
"epoch": 0.12534927315813674,
"grad_norm": 0.006236658431589603,
"learning_rate": 6.679536736610137e-06,
"loss": 0.0005,
"step": 886
},
{
"epoch": 0.1254907508930782,
"grad_norm": 0.0028792882803827524,
"learning_rate": 6.5671057543387985e-06,
"loss": 0.0001,
"step": 887
},
{
"epoch": 0.12563222862801968,
"grad_norm": 0.004536019172519445,
"learning_rate": 6.455596917013273e-06,
"loss": 0.0002,
"step": 888
},
{
"epoch": 0.12577370636296112,
"grad_norm": 0.0009695796761661768,
"learning_rate": 6.345011325180772e-06,
"loss": 0.0001,
"step": 889
},
{
"epoch": 0.1259151840979026,
"grad_norm": 0.00836182851344347,
"learning_rate": 6.235350070276447e-06,
"loss": 0.0004,
"step": 890
},
{
"epoch": 0.12605666183284406,
"grad_norm": 0.0012931537348777056,
"learning_rate": 6.126614234612593e-06,
"loss": 0.0001,
"step": 891
},
{
"epoch": 0.12619813956778553,
"grad_norm": 0.004833157639950514,
"learning_rate": 6.018804891368035e-06,
"loss": 0.0004,
"step": 892
},
{
"epoch": 0.12633961730272697,
"grad_norm": 0.0011749600525945425,
"learning_rate": 5.911923104577455e-06,
"loss": 0.0001,
"step": 893
},
{
"epoch": 0.12648109503766844,
"grad_norm": 0.0016763556050136685,
"learning_rate": 5.805969929120947e-06,
"loss": 0.0001,
"step": 894
},
{
"epoch": 0.1266225727726099,
"grad_norm": 0.003065883880481124,
"learning_rate": 5.700946410713548e-06,
"loss": 0.0002,
"step": 895
},
{
"epoch": 0.12676405050755138,
"grad_norm": 0.015194329433143139,
"learning_rate": 5.5968535858950345e-06,
"loss": 0.0004,
"step": 896
},
{
"epoch": 0.12690552824249285,
"grad_norm": 0.007229126524180174,
"learning_rate": 5.49369248201953e-06,
"loss": 0.0006,
"step": 897
},
{
"epoch": 0.1270470059774343,
"grad_norm": 0.026731327176094055,
"learning_rate": 5.39146411724547e-06,
"loss": 0.0005,
"step": 898
},
{
"epoch": 0.12718848371237576,
"grad_norm": 0.001329147256910801,
"learning_rate": 5.290169500525577e-06,
"loss": 0.0001,
"step": 899
},
{
"epoch": 0.12732996144731723,
"grad_norm": 0.016030043363571167,
"learning_rate": 5.189809631596798e-06,
"loss": 0.0008,
"step": 900
},
{
"epoch": 0.1274714391822587,
"grad_norm": 0.001103358343243599,
"learning_rate": 5.0903855009705514e-06,
"loss": 0.0001,
"step": 901
},
{
"epoch": 0.12761291691720014,
"grad_norm": 0.0012724302941933274,
"learning_rate": 4.991898089922819e-06,
"loss": 0.0001,
"step": 902
},
{
"epoch": 0.1277543946521416,
"grad_norm": 0.0033973059616982937,
"learning_rate": 4.8943483704846475e-06,
"loss": 0.0003,
"step": 903
},
{
"epoch": 0.12789587238708308,
"grad_norm": 0.01135913748294115,
"learning_rate": 4.797737305432337e-06,
"loss": 0.0006,
"step": 904
},
{
"epoch": 0.12803735012202455,
"grad_norm": 0.0018166914815083146,
"learning_rate": 4.702065848278126e-06,
"loss": 0.0002,
"step": 905
},
{
"epoch": 0.12817882785696602,
"grad_norm": 0.0008935577352531254,
"learning_rate": 4.607334943260655e-06,
"loss": 0.0001,
"step": 906
},
{
"epoch": 0.12832030559190746,
"grad_norm": 0.0008974755764938891,
"learning_rate": 4.513545525335705e-06,
"loss": 0.0001,
"step": 907
},
{
"epoch": 0.12846178332684893,
"grad_norm": 0.0007228873437270522,
"learning_rate": 4.420698520166988e-06,
"loss": 0.0001,
"step": 908
},
{
"epoch": 0.1286032610617904,
"grad_norm": 0.004292274825274944,
"learning_rate": 4.328794844116946e-06,
"loss": 0.0002,
"step": 909
},
{
"epoch": 0.12874473879673187,
"grad_norm": 0.003769136033952236,
"learning_rate": 4.237835404237778e-06,
"loss": 0.0002,
"step": 910
},
{
"epoch": 0.12888621653167331,
"grad_norm": 0.044125765562057495,
"learning_rate": 4.147821098262405e-06,
"loss": 0.0006,
"step": 911
},
{
"epoch": 0.12902769426661478,
"grad_norm": 0.001430506817996502,
"learning_rate": 4.0587528145957235e-06,
"loss": 0.0001,
"step": 912
},
{
"epoch": 0.12916917200155625,
"grad_norm": 0.007022987585514784,
"learning_rate": 3.970631432305694e-06,
"loss": 0.0004,
"step": 913
},
{
"epoch": 0.12931064973649772,
"grad_norm": 0.0009093685657717288,
"learning_rate": 3.883457821114811e-06,
"loss": 0.0001,
"step": 914
},
{
"epoch": 0.1294521274714392,
"grad_norm": 0.0009062972967512906,
"learning_rate": 3.797232841391407e-06,
"loss": 0.0001,
"step": 915
},
{
"epoch": 0.12959360520638064,
"grad_norm": 0.001498628407716751,
"learning_rate": 3.711957344141237e-06,
"loss": 0.0001,
"step": 916
},
{
"epoch": 0.1297350829413221,
"grad_norm": 0.0011825069086626172,
"learning_rate": 3.627632170999029e-06,
"loss": 0.0001,
"step": 917
},
{
"epoch": 0.12987656067626357,
"grad_norm": 0.005202493164688349,
"learning_rate": 3.5442581542201923e-06,
"loss": 0.0002,
"step": 918
},
{
"epoch": 0.13001803841120504,
"grad_norm": 0.0010673865908756852,
"learning_rate": 3.461836116672612e-06,
"loss": 0.0001,
"step": 919
},
{
"epoch": 0.1301595161461465,
"grad_norm": 0.003380801063030958,
"learning_rate": 3.380366871828522e-06,
"loss": 0.0003,
"step": 920
},
{
"epoch": 0.13030099388108796,
"grad_norm": 0.001819453900679946,
"learning_rate": 3.2998512237565005e-06,
"loss": 0.0002,
"step": 921
},
{
"epoch": 0.13044247161602943,
"grad_norm": 0.004628917668014765,
"learning_rate": 3.2202899671134546e-06,
"loss": 0.0003,
"step": 922
},
{
"epoch": 0.1305839493509709,
"grad_norm": 0.0009073893306776881,
"learning_rate": 3.1416838871368924e-06,
"loss": 0.0001,
"step": 923
},
{
"epoch": 0.13072542708591237,
"grad_norm": 0.0032710530795156956,
"learning_rate": 3.064033759637064e-06,
"loss": 0.0003,
"step": 924
},
{
"epoch": 0.1308669048208538,
"grad_norm": 0.000904114858713001,
"learning_rate": 2.9873403509894203e-06,
"loss": 0.0001,
"step": 925
},
{
"epoch": 0.13100838255579528,
"grad_norm": 0.0010823109187185764,
"learning_rate": 2.9116044181269007e-06,
"loss": 0.0001,
"step": 926
},
{
"epoch": 0.13114986029073675,
"grad_norm": 0.010690744034945965,
"learning_rate": 2.836826708532603e-06,
"loss": 0.0006,
"step": 927
},
{
"epoch": 0.13129133802567822,
"grad_norm": 0.007006384897977114,
"learning_rate": 2.7630079602323442e-06,
"loss": 0.0003,
"step": 928
},
{
"epoch": 0.13143281576061966,
"grad_norm": 0.0007068116683512926,
"learning_rate": 2.690148901787337e-06,
"loss": 0.0001,
"step": 929
},
{
"epoch": 0.13157429349556113,
"grad_norm": 0.003064696444198489,
"learning_rate": 2.618250252287113e-06,
"loss": 0.0002,
"step": 930
},
{
"epoch": 0.1317157712305026,
"grad_norm": 0.0013085821410641074,
"learning_rate": 2.5473127213422763e-06,
"loss": 0.0001,
"step": 931
},
{
"epoch": 0.13185724896544407,
"grad_norm": 0.0008828960126265883,
"learning_rate": 2.4773370090776626e-06,
"loss": 0.0001,
"step": 932
},
{
"epoch": 0.13199872670038554,
"grad_norm": 2.9277994632720947,
"learning_rate": 2.4083238061252567e-06,
"loss": 1.0387,
"step": 933
},
{
"epoch": 0.13214020443532698,
"grad_norm": 0.0015159097965806723,
"learning_rate": 2.3402737936175425e-06,
"loss": 0.0001,
"step": 934
},
{
"epoch": 0.13228168217026845,
"grad_norm": 0.0016068522818386555,
"learning_rate": 2.273187643180652e-06,
"loss": 0.0001,
"step": 935
},
{
"epoch": 0.13242315990520992,
"grad_norm": 0.001012073247693479,
"learning_rate": 2.2070660169278166e-06,
"loss": 0.0001,
"step": 936
},
{
"epoch": 0.1325646376401514,
"grad_norm": 0.004135122988373041,
"learning_rate": 2.141909567452793e-06,
"loss": 0.0002,
"step": 937
},
{
"epoch": 0.13270611537509283,
"grad_norm": 0.0009160821209661663,
"learning_rate": 2.0777189378234143e-06,
"loss": 0.0001,
"step": 938
},
{
"epoch": 0.1328475931100343,
"grad_norm": 0.0008047740557231009,
"learning_rate": 2.014494761575314e-06,
"loss": 0.0001,
"step": 939
},
{
"epoch": 0.13298907084497577,
"grad_norm": 0.0031038711313158274,
"learning_rate": 1.9522376627055583e-06,
"loss": 0.0003,
"step": 940
},
{
"epoch": 0.13313054857991724,
"grad_norm": 0.006230383645743132,
"learning_rate": 1.8909482556666024e-06,
"loss": 0.0005,
"step": 941
},
{
"epoch": 0.1332720263148587,
"grad_norm": 0.0016900923801586032,
"learning_rate": 1.8306271453601199e-06,
"loss": 0.0001,
"step": 942
},
{
"epoch": 0.13341350404980015,
"grad_norm": 0.0012291180901229382,
"learning_rate": 1.771274927131139e-06,
"loss": 0.0001,
"step": 943
},
{
"epoch": 0.13355498178474162,
"grad_norm": 0.002149342093616724,
"learning_rate": 1.712892186762083e-06,
"loss": 0.0002,
"step": 944
},
{
"epoch": 0.1336964595196831,
"grad_norm": 0.0009066860657185316,
"learning_rate": 1.6554795004670388e-06,
"loss": 0.0001,
"step": 945
},
{
"epoch": 0.13383793725462456,
"grad_norm": 0.0023614042438566685,
"learning_rate": 1.5990374348860305e-06,
"loss": 0.0001,
"step": 946
},
{
"epoch": 0.133979414989566,
"grad_norm": 0.0028560732025653124,
"learning_rate": 1.543566547079467e-06,
"loss": 0.0002,
"step": 947
},
{
"epoch": 0.13412089272450747,
"grad_norm": 0.004639330320060253,
"learning_rate": 1.4890673845226133e-06,
"loss": 0.0002,
"step": 948
},
{
"epoch": 0.13426237045944894,
"grad_norm": 0.0011574679519981146,
"learning_rate": 1.4355404851001952e-06,
"loss": 0.0001,
"step": 949
},
{
"epoch": 0.1344038481943904,
"grad_norm": 0.01731015555560589,
"learning_rate": 1.3829863771011253e-06,
"loss": 0.0002,
"step": 950
},
{
"epoch": 0.13454532592933188,
"grad_norm": 0.0013583000982180238,
"learning_rate": 1.3314055792131964e-06,
"loss": 0.0001,
"step": 951
},
{
"epoch": 0.13468680366427332,
"grad_norm": 0.01872401311993599,
"learning_rate": 1.280798600518085e-06,
"loss": 0.0004,
"step": 952
},
{
"epoch": 0.1348282813992148,
"grad_norm": 0.005545449908822775,
"learning_rate": 1.231165940486234e-06,
"loss": 0.0005,
"step": 953
},
{
"epoch": 0.13496975913415626,
"grad_norm": 0.0026185193564742804,
"learning_rate": 1.1825080889719563e-06,
"loss": 0.0001,
"step": 954
},
{
"epoch": 0.13511123686909773,
"grad_norm": 0.0041889348067343235,
"learning_rate": 1.134825526208605e-06,
"loss": 0.0003,
"step": 955
},
{
"epoch": 0.13525271460403918,
"grad_norm": 0.0009472736855968833,
"learning_rate": 1.0881187228038215e-06,
"loss": 0.0001,
"step": 956
},
{
"epoch": 0.13539419233898065,
"grad_norm": 0.0025584392715245485,
"learning_rate": 1.0423881397349068e-06,
"loss": 0.0001,
"step": 957
},
{
"epoch": 0.13553567007392212,
"grad_norm": 0.0036006199661642313,
"learning_rate": 9.976342283442463e-07,
"loss": 0.0003,
"step": 958
},
{
"epoch": 0.13567714780886359,
"grad_norm": 0.004876970779150724,
"learning_rate": 9.538574303348813e-07,
"loss": 0.0003,
"step": 959
},
{
"epoch": 0.13581862554380505,
"grad_norm": 0.0027515313122421503,
"learning_rate": 9.110581777661331e-07,
"loss": 0.0002,
"step": 960
},
{
"epoch": 0.1359601032787465,
"grad_norm": 0.001473053591325879,
"learning_rate": 8.692368930493521e-07,
"loss": 0.0001,
"step": 961
},
{
"epoch": 0.13610158101368797,
"grad_norm": 0.004612274467945099,
"learning_rate": 8.283939889437209e-07,
"loss": 0.0002,
"step": 962
},
{
"epoch": 0.13624305874862944,
"grad_norm": 0.0012349090538918972,
"learning_rate": 7.885298685522235e-07,
"loss": 0.0001,
"step": 963
},
{
"epoch": 0.1363845364835709,
"grad_norm": 0.0017732393462210894,
"learning_rate": 7.496449253176274e-07,
"loss": 0.0002,
"step": 964
},
{
"epoch": 0.13652601421851235,
"grad_norm": 0.0008514790097251534,
"learning_rate": 7.117395430186414e-07,
"loss": 0.0001,
"step": 965
},
{
"epoch": 0.13666749195345382,
"grad_norm": 0.000827544427011162,
"learning_rate": 6.748140957660631e-07,
"loss": 0.0001,
"step": 966
},
{
"epoch": 0.1368089696883953,
"grad_norm": 0.0010190614266321063,
"learning_rate": 6.388689479991605e-07,
"loss": 0.0001,
"step": 967
},
{
"epoch": 0.13695044742333676,
"grad_norm": 0.0039400323294103146,
"learning_rate": 6.039044544820404e-07,
"loss": 0.0002,
"step": 968
},
{
"epoch": 0.13709192515827823,
"grad_norm": 0.002428358420729637,
"learning_rate": 5.699209603001076e-07,
"loss": 0.0002,
"step": 969
},
{
"epoch": 0.13723340289321967,
"grad_norm": 0.0017006148118525743,
"learning_rate": 5.369188008567672e-07,
"loss": 0.0001,
"step": 970
},
{
"epoch": 0.13737488062816114,
"grad_norm": 0.0008442560210824013,
"learning_rate": 5.048983018699827e-07,
"loss": 0.0001,
"step": 971
},
{
"epoch": 0.1375163583631026,
"grad_norm": 0.002315743826329708,
"learning_rate": 4.738597793691679e-07,
"loss": 0.0002,
"step": 972
},
{
"epoch": 0.13765783609804408,
"grad_norm": 0.016204334795475006,
"learning_rate": 4.438035396920004e-07,
"loss": 0.0003,
"step": 973
},
{
"epoch": 0.13779931383298552,
"grad_norm": 0.007021232508122921,
"learning_rate": 4.1472987948143473e-07,
"loss": 0.0005,
"step": 974
},
{
"epoch": 0.137940791567927,
"grad_norm": 0.0011094443034380674,
"learning_rate": 3.866390856827495e-07,
"loss": 0.0001,
"step": 975
},
{
"epoch": 0.13808226930286846,
"grad_norm": 0.0013754486571997404,
"learning_rate": 3.595314355407609e-07,
"loss": 0.0001,
"step": 976
},
{
"epoch": 0.13822374703780993,
"grad_norm": 0.0012389803305268288,
"learning_rate": 3.3340719659701313e-07,
"loss": 0.0001,
"step": 977
},
{
"epoch": 0.1383652247727514,
"grad_norm": 0.0010980741353705525,
"learning_rate": 3.0826662668720364e-07,
"loss": 0.0001,
"step": 978
},
{
"epoch": 0.13850670250769284,
"grad_norm": 0.006173287518322468,
"learning_rate": 2.841099739386066e-07,
"loss": 0.0004,
"step": 979
},
{
"epoch": 0.1386481802426343,
"grad_norm": 1.0002788305282593,
"learning_rate": 2.609374767676309e-07,
"loss": 0.3774,
"step": 980
},
{
"epoch": 0.13878965797757578,
"grad_norm": 0.0016743881860747933,
"learning_rate": 2.387493638774774e-07,
"loss": 0.0001,
"step": 981
},
{
"epoch": 0.13893113571251725,
"grad_norm": 0.0023470830637961626,
"learning_rate": 2.175458542558517e-07,
"loss": 0.0001,
"step": 982
},
{
"epoch": 0.1390726134474587,
"grad_norm": 0.015273729339241982,
"learning_rate": 1.973271571728441e-07,
"loss": 0.0009,
"step": 983
},
{
"epoch": 0.13921409118240016,
"grad_norm": 0.008181693963706493,
"learning_rate": 1.7809347217881966e-07,
"loss": 0.0004,
"step": 984
},
{
"epoch": 0.13935556891734163,
"grad_norm": 0.0018706975970417261,
"learning_rate": 1.598449891024978e-07,
"loss": 0.0001,
"step": 985
},
{
"epoch": 0.1394970466522831,
"grad_norm": 0.002527001081034541,
"learning_rate": 1.425818880490315e-07,
"loss": 0.0002,
"step": 986
},
{
"epoch": 0.13963852438722457,
"grad_norm": 0.0017283704364672303,
"learning_rate": 1.2630433939825327e-07,
"loss": 0.0001,
"step": 987
},
{
"epoch": 0.139780002122166,
"grad_norm": 0.0018094810657203197,
"learning_rate": 1.1101250380300965e-07,
"loss": 0.0001,
"step": 988
},
{
"epoch": 0.13992147985710748,
"grad_norm": 0.001633922685869038,
"learning_rate": 9.670653218752934e-08,
"loss": 0.0001,
"step": 989
},
{
"epoch": 0.14006295759204895,
"grad_norm": 0.0005336939357221127,
"learning_rate": 8.33865657459909e-08,
"loss": 0.0001,
"step": 990
},
{
"epoch": 0.14020443532699042,
"grad_norm": 0.004103470593690872,
"learning_rate": 7.105273594107953e-08,
"loss": 0.0002,
"step": 991
},
{
"epoch": 0.14034591306193187,
"grad_norm": 0.003065005410462618,
"learning_rate": 5.970516450271025e-08,
"loss": 0.0003,
"step": 992
},
{
"epoch": 0.14048739079687333,
"grad_norm": 0.0010252476204186678,
"learning_rate": 4.934396342684e-08,
"loss": 0.0001,
"step": 993
},
{
"epoch": 0.1406288685318148,
"grad_norm": 0.0008998365374282002,
"learning_rate": 3.996923497434635e-08,
"loss": 0.0001,
"step": 994
},
{
"epoch": 0.14077034626675627,
"grad_norm": 0.007965818047523499,
"learning_rate": 3.1581071670006015e-08,
"loss": 0.0003,
"step": 995
},
{
"epoch": 0.14091182400169774,
"grad_norm": 0.0009977156296372414,
"learning_rate": 2.417955630159563e-08,
"loss": 0.0001,
"step": 996
},
{
"epoch": 0.1410533017366392,
"grad_norm": 0.0016674831276759505,
"learning_rate": 1.7764761919103477e-08,
"loss": 0.0001,
"step": 997
},
{
"epoch": 0.14119477947158066,
"grad_norm": 0.002003878355026245,
"learning_rate": 1.2336751833941229e-08,
"loss": 0.0002,
"step": 998
},
{
"epoch": 0.14133625720652213,
"grad_norm": 0.0028543765656650066,
"learning_rate": 7.895579618388827e-09,
"loss": 0.0001,
"step": 999
},
{
"epoch": 0.1414777349414636,
"grad_norm": 0.0011507285526022315,
"learning_rate": 4.4412891050171765e-09,
"loss": 0.0001,
"step": 1000
},
{
"epoch": 0.1414777349414636,
"eval_loss": 0.003401491791009903,
"eval_runtime": 154.3696,
"eval_samples_per_second": 9.646,
"eval_steps_per_second": 9.646,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.1662243053568e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}