{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1861,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0005373455131649651,
"grad_norm": 23.072626458069166,
"learning_rate": 5.3475935828877005e-08,
"loss": 1.329,
"step": 1
},
{
"epoch": 0.0026867275658248252,
"grad_norm": 23.380544253320735,
"learning_rate": 2.6737967914438503e-07,
"loss": 1.3238,
"step": 5
},
{
"epoch": 0.0053734551316496505,
"grad_norm": 14.233532908496148,
"learning_rate": 5.347593582887701e-07,
"loss": 1.2647,
"step": 10
},
{
"epoch": 0.008060182697474477,
"grad_norm": 11.791057381257374,
"learning_rate": 8.021390374331551e-07,
"loss": 1.1476,
"step": 15
},
{
"epoch": 0.010746910263299301,
"grad_norm": 7.369104708436159,
"learning_rate": 1.0695187165775401e-06,
"loss": 1.0118,
"step": 20
},
{
"epoch": 0.013433637829124127,
"grad_norm": 3.790654764997102,
"learning_rate": 1.3368983957219254e-06,
"loss": 0.9345,
"step": 25
},
{
"epoch": 0.016120365394948953,
"grad_norm": 3.447275461870041,
"learning_rate": 1.6042780748663103e-06,
"loss": 0.8918,
"step": 30
},
{
"epoch": 0.018807092960773777,
"grad_norm": 2.9186352407291847,
"learning_rate": 1.8716577540106954e-06,
"loss": 0.8518,
"step": 35
},
{
"epoch": 0.021493820526598602,
"grad_norm": 2.9408795060935695,
"learning_rate": 2.1390374331550802e-06,
"loss": 0.844,
"step": 40
},
{
"epoch": 0.02418054809242343,
"grad_norm": 2.8790720251278565,
"learning_rate": 2.4064171122994653e-06,
"loss": 0.8318,
"step": 45
},
{
"epoch": 0.026867275658248254,
"grad_norm": 2.963731808583039,
"learning_rate": 2.673796791443851e-06,
"loss": 0.8099,
"step": 50
},
{
"epoch": 0.02955400322407308,
"grad_norm": 2.8588219846284297,
"learning_rate": 2.9411764705882355e-06,
"loss": 0.8054,
"step": 55
},
{
"epoch": 0.032240730789897906,
"grad_norm": 3.105229156653952,
"learning_rate": 3.2085561497326205e-06,
"loss": 0.7913,
"step": 60
},
{
"epoch": 0.03492745835572273,
"grad_norm": 3.1413414626518943,
"learning_rate": 3.4759358288770056e-06,
"loss": 0.7867,
"step": 65
},
{
"epoch": 0.037614185921547555,
"grad_norm": 3.132841956959321,
"learning_rate": 3.7433155080213907e-06,
"loss": 0.7893,
"step": 70
},
{
"epoch": 0.04030091348737238,
"grad_norm": 3.0623425732500142,
"learning_rate": 4.010695187165775e-06,
"loss": 0.7717,
"step": 75
},
{
"epoch": 0.042987641053197204,
"grad_norm": 9.233814569420407,
"learning_rate": 4.2780748663101604e-06,
"loss": 0.7715,
"step": 80
},
{
"epoch": 0.04567436861902203,
"grad_norm": 3.3097521575259368,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.761,
"step": 85
},
{
"epoch": 0.04836109618484686,
"grad_norm": 3.0349283160634597,
"learning_rate": 4.812834224598931e-06,
"loss": 0.7621,
"step": 90
},
{
"epoch": 0.051047823750671684,
"grad_norm": 3.1760527715984153,
"learning_rate": 5.0802139037433165e-06,
"loss": 0.7591,
"step": 95
},
{
"epoch": 0.05373455131649651,
"grad_norm": 2.928755685153135,
"learning_rate": 5.347593582887702e-06,
"loss": 0.7433,
"step": 100
},
{
"epoch": 0.05642127888232133,
"grad_norm": 3.0202006224727174,
"learning_rate": 5.614973262032086e-06,
"loss": 0.7314,
"step": 105
},
{
"epoch": 0.05910800644814616,
"grad_norm": 3.1295278900578682,
"learning_rate": 5.882352941176471e-06,
"loss": 0.7483,
"step": 110
},
{
"epoch": 0.06179473401397098,
"grad_norm": 3.0863071896945167,
"learning_rate": 6.149732620320856e-06,
"loss": 0.7276,
"step": 115
},
{
"epoch": 0.06448146157979581,
"grad_norm": 3.0744127119070526,
"learning_rate": 6.417112299465241e-06,
"loss": 0.7183,
"step": 120
},
{
"epoch": 0.06716818914562063,
"grad_norm": 3.22926416708401,
"learning_rate": 6.684491978609626e-06,
"loss": 0.7242,
"step": 125
},
{
"epoch": 0.06985491671144546,
"grad_norm": 2.9238184305563033,
"learning_rate": 6.951871657754011e-06,
"loss": 0.7194,
"step": 130
},
{
"epoch": 0.07254164427727028,
"grad_norm": 2.907399933470749,
"learning_rate": 7.219251336898396e-06,
"loss": 0.7204,
"step": 135
},
{
"epoch": 0.07522837184309511,
"grad_norm": 2.8941698993873137,
"learning_rate": 7.486631016042781e-06,
"loss": 0.6986,
"step": 140
},
{
"epoch": 0.07791509940891994,
"grad_norm": 2.899562495835514,
"learning_rate": 7.754010695187166e-06,
"loss": 0.7021,
"step": 145
},
{
"epoch": 0.08060182697474476,
"grad_norm": 2.8740571869341687,
"learning_rate": 8.02139037433155e-06,
"loss": 0.7096,
"step": 150
},
{
"epoch": 0.08328855454056959,
"grad_norm": 2.912059141124366,
"learning_rate": 8.288770053475937e-06,
"loss": 0.7012,
"step": 155
},
{
"epoch": 0.08597528210639441,
"grad_norm": 2.933852566630193,
"learning_rate": 8.556149732620321e-06,
"loss": 0.6909,
"step": 160
},
{
"epoch": 0.08866200967221924,
"grad_norm": 2.954361982135904,
"learning_rate": 8.823529411764707e-06,
"loss": 0.6947,
"step": 165
},
{
"epoch": 0.09134873723804406,
"grad_norm": 2.914429965048127,
"learning_rate": 9.090909090909091e-06,
"loss": 0.7064,
"step": 170
},
{
"epoch": 0.09403546480386889,
"grad_norm": 2.824165175450085,
"learning_rate": 9.358288770053477e-06,
"loss": 0.6962,
"step": 175
},
{
"epoch": 0.09672219236969372,
"grad_norm": 2.7629699563001724,
"learning_rate": 9.625668449197861e-06,
"loss": 0.7093,
"step": 180
},
{
"epoch": 0.09940891993551854,
"grad_norm": 2.9700788007957484,
"learning_rate": 9.893048128342247e-06,
"loss": 0.7001,
"step": 185
},
{
"epoch": 0.10209564750134337,
"grad_norm": 2.720187963907155,
"learning_rate": 9.999920755303033e-06,
"loss": 0.6972,
"step": 190
},
{
"epoch": 0.10478237506716818,
"grad_norm": 2.579060861199631,
"learning_rate": 9.999436491251425e-06,
"loss": 0.7011,
"step": 195
},
{
"epoch": 0.10746910263299302,
"grad_norm": 2.650831290761427,
"learning_rate": 9.998512030567253e-06,
"loss": 0.7045,
"step": 200
},
{
"epoch": 0.11015583019881783,
"grad_norm": 3.3197406459470673,
"learning_rate": 9.99714745464859e-06,
"loss": 0.6992,
"step": 205
},
{
"epoch": 0.11284255776464266,
"grad_norm": 3.0709444563569126,
"learning_rate": 9.995342883645325e-06,
"loss": 0.708,
"step": 210
},
{
"epoch": 0.1155292853304675,
"grad_norm": 2.815374931010047,
"learning_rate": 9.993098476448576e-06,
"loss": 0.7011,
"step": 215
},
{
"epoch": 0.11821601289629231,
"grad_norm": 2.9613320208688005,
"learning_rate": 9.990414430676716e-06,
"loss": 0.6821,
"step": 220
},
{
"epoch": 0.12090274046211714,
"grad_norm": 2.690351994232397,
"learning_rate": 9.987290982657961e-06,
"loss": 0.6848,
"step": 225
},
{
"epoch": 0.12358946802794196,
"grad_norm": 3.5601408393747445,
"learning_rate": 9.983728407409565e-06,
"loss": 0.6832,
"step": 230
},
{
"epoch": 0.1262761955937668,
"grad_norm": 2.4701820214975014,
"learning_rate": 9.979727018613607e-06,
"loss": 0.6934,
"step": 235
},
{
"epoch": 0.12896292315959162,
"grad_norm": 2.4626384928709983,
"learning_rate": 9.975287168589369e-06,
"loss": 0.6785,
"step": 240
},
{
"epoch": 0.13164965072541646,
"grad_norm": 2.588440598744194,
"learning_rate": 9.970409248262314e-06,
"loss": 0.6978,
"step": 245
},
{
"epoch": 0.13433637829124126,
"grad_norm": 2.6880919710650413,
"learning_rate": 9.965093687129669e-06,
"loss": 0.6808,
"step": 250
},
{
"epoch": 0.1370231058570661,
"grad_norm": 2.9087730515287955,
"learning_rate": 9.959340953222602e-06,
"loss": 0.6758,
"step": 255
},
{
"epoch": 0.13970983342289092,
"grad_norm": 2.6772738824086075,
"learning_rate": 9.953151553065019e-06,
"loss": 0.6698,
"step": 260
},
{
"epoch": 0.14239656098871575,
"grad_norm": 2.669218732871304,
"learning_rate": 9.94652603162896e-06,
"loss": 0.6939,
"step": 265
},
{
"epoch": 0.14508328855454056,
"grad_norm": 2.4902462796339457,
"learning_rate": 9.939464972286618e-06,
"loss": 0.6702,
"step": 270
},
{
"epoch": 0.1477700161203654,
"grad_norm": 2.4766182391586975,
"learning_rate": 9.931968996758972e-06,
"loss": 0.6697,
"step": 275
},
{
"epoch": 0.15045674368619022,
"grad_norm": 2.5255485155595228,
"learning_rate": 9.924038765061042e-06,
"loss": 0.6665,
"step": 280
},
{
"epoch": 0.15314347125201505,
"grad_norm": 2.5656888228811794,
"learning_rate": 9.915674975443779e-06,
"loss": 0.6565,
"step": 285
},
{
"epoch": 0.15583019881783988,
"grad_norm": 2.825801360492887,
"learning_rate": 9.906878364332586e-06,
"loss": 0.6642,
"step": 290
},
{
"epoch": 0.15851692638366469,
"grad_norm": 2.6497249215881253,
"learning_rate": 9.897649706262474e-06,
"loss": 0.6624,
"step": 295
},
{
"epoch": 0.16120365394948952,
"grad_norm": 2.6523672302785575,
"learning_rate": 9.88798981380986e-06,
"loss": 0.6659,
"step": 300
},
{
"epoch": 0.16389038151531435,
"grad_norm": 2.6257923259309495,
"learning_rate": 9.877899537521028e-06,
"loss": 0.6533,
"step": 305
},
{
"epoch": 0.16657710908113918,
"grad_norm": 2.465767578464491,
"learning_rate": 9.867379765837237e-06,
"loss": 0.6518,
"step": 310
},
{
"epoch": 0.169263836646964,
"grad_norm": 2.556282218650964,
"learning_rate": 9.85643142501649e-06,
"loss": 0.6559,
"step": 315
},
{
"epoch": 0.17195056421278881,
"grad_norm": 2.6286087389340227,
"learning_rate": 9.845055479051986e-06,
"loss": 0.6469,
"step": 320
},
{
"epoch": 0.17463729177861365,
"grad_norm": 2.419535618698552,
"learning_rate": 9.833252929587231e-06,
"loss": 0.6631,
"step": 325
},
{
"epoch": 0.17732401934443848,
"grad_norm": 2.4542391410254956,
"learning_rate": 9.821024815827854e-06,
"loss": 0.6591,
"step": 330
},
{
"epoch": 0.1800107469102633,
"grad_norm": 2.4908873755968997,
"learning_rate": 9.808372214450093e-06,
"loss": 0.6658,
"step": 335
},
{
"epoch": 0.1826974744760881,
"grad_norm": 2.3733848426387176,
"learning_rate": 9.795296239506011e-06,
"loss": 0.662,
"step": 340
},
{
"epoch": 0.18538420204191294,
"grad_norm": 2.865296798119886,
"learning_rate": 9.781798042325392e-06,
"loss": 0.6526,
"step": 345
},
{
"epoch": 0.18807092960773777,
"grad_norm": 2.487445934484789,
"learning_rate": 9.767878811414373e-06,
"loss": 0.6525,
"step": 350
},
{
"epoch": 0.1907576571735626,
"grad_norm": 2.388699669948045,
"learning_rate": 9.753539772350792e-06,
"loss": 0.647,
"step": 355
},
{
"epoch": 0.19344438473938744,
"grad_norm": 2.445554737991476,
"learning_rate": 9.738782187676282e-06,
"loss": 0.6425,
"step": 360
},
{
"epoch": 0.19613111230521224,
"grad_norm": 2.435104584478995,
"learning_rate": 9.723607356785103e-06,
"loss": 0.6443,
"step": 365
},
{
"epoch": 0.19881783987103707,
"grad_norm": 2.479158175926532,
"learning_rate": 9.70801661580973e-06,
"loss": 0.6575,
"step": 370
},
{
"epoch": 0.2015045674368619,
"grad_norm": 2.637755494092567,
"learning_rate": 9.692011337503212e-06,
"loss": 0.6421,
"step": 375
},
{
"epoch": 0.20419129500268673,
"grad_norm": 2.520737872507786,
"learning_rate": 9.675592931118293e-06,
"loss": 0.6358,
"step": 380
},
{
"epoch": 0.20687802256851157,
"grad_norm": 2.3907603747141675,
"learning_rate": 9.658762842283343e-06,
"loss": 0.6285,
"step": 385
},
{
"epoch": 0.20956475013433637,
"grad_norm": 2.3651549722988023,
"learning_rate": 9.641522552875055e-06,
"loss": 0.6119,
"step": 390
},
{
"epoch": 0.2122514777001612,
"grad_norm": 2.5208115737245604,
"learning_rate": 9.62387358088798e-06,
"loss": 0.6333,
"step": 395
},
{
"epoch": 0.21493820526598603,
"grad_norm": 2.6819070319357037,
"learning_rate": 9.605817480300863e-06,
"loss": 0.6205,
"step": 400
},
{
"epoch": 0.21762493283181086,
"grad_norm": 2.356493009815594,
"learning_rate": 9.587355840939813e-06,
"loss": 0.64,
"step": 405
},
{
"epoch": 0.22031166039763567,
"grad_norm": 2.405371194871644,
"learning_rate": 9.568490288338324e-06,
"loss": 0.6245,
"step": 410
},
{
"epoch": 0.2229983879634605,
"grad_norm": 2.6012482105829706,
"learning_rate": 9.549222483594154e-06,
"loss": 0.6451,
"step": 415
},
{
"epoch": 0.22568511552928533,
"grad_norm": 2.3956259391703196,
"learning_rate": 9.529554123223053e-06,
"loss": 0.6223,
"step": 420
},
{
"epoch": 0.22837184309511016,
"grad_norm": 2.398583860789219,
"learning_rate": 9.5094869390094e-06,
"loss": 0.6427,
"step": 425
},
{
"epoch": 0.231058570660935,
"grad_norm": 2.5232663534614863,
"learning_rate": 9.48902269785371e-06,
"loss": 0.6264,
"step": 430
},
{
"epoch": 0.2337452982267598,
"grad_norm": 2.4227785562286033,
"learning_rate": 9.468163201617063e-06,
"loss": 0.6172,
"step": 435
},
{
"epoch": 0.23643202579258463,
"grad_norm": 2.4898638907719586,
"learning_rate": 9.446910286962453e-06,
"loss": 0.6186,
"step": 440
},
{
"epoch": 0.23911875335840946,
"grad_norm": 2.3600937624812524,
"learning_rate": 9.425265825193077e-06,
"loss": 0.6009,
"step": 445
},
{
"epoch": 0.2418054809242343,
"grad_norm": 2.479426617963906,
"learning_rate": 9.403231722087554e-06,
"loss": 0.6373,
"step": 450
},
{
"epoch": 0.24449220849005912,
"grad_norm": 2.4357419818938117,
"learning_rate": 9.380809917732132e-06,
"loss": 0.6076,
"step": 455
},
{
"epoch": 0.24717893605588392,
"grad_norm": 2.47734706096724,
"learning_rate": 9.358002386349862e-06,
"loss": 0.6052,
"step": 460
},
{
"epoch": 0.24986566362170876,
"grad_norm": 2.545179042557431,
"learning_rate": 9.334811136126778e-06,
"loss": 0.6067,
"step": 465
},
{
"epoch": 0.2525523911875336,
"grad_norm": 2.576572600102202,
"learning_rate": 9.31123820903506e-06,
"loss": 0.6035,
"step": 470
},
{
"epoch": 0.2552391187533584,
"grad_norm": 2.4612256004699145,
"learning_rate": 9.287285680653254e-06,
"loss": 0.606,
"step": 475
},
{
"epoch": 0.25792584631918325,
"grad_norm": 2.495237864289767,
"learning_rate": 9.262955659983522e-06,
"loss": 0.5942,
"step": 480
},
{
"epoch": 0.2606125738850081,
"grad_norm": 2.65866167773882,
"learning_rate": 9.238250289265921e-06,
"loss": 0.599,
"step": 485
},
{
"epoch": 0.2632993014508329,
"grad_norm": 2.473870495050297,
"learning_rate": 9.21317174378982e-06,
"loss": 0.612,
"step": 490
},
{
"epoch": 0.2659860290166577,
"grad_norm": 2.3022323203953,
"learning_rate": 9.187722231702326e-06,
"loss": 0.5794,
"step": 495
},
{
"epoch": 0.2686727565824825,
"grad_norm": 2.3360448253147643,
"learning_rate": 9.161903993813892e-06,
"loss": 0.5821,
"step": 500
},
{
"epoch": 0.27135948414830735,
"grad_norm": 2.4400501057412454,
"learning_rate": 9.135719303400995e-06,
"loss": 0.5969,
"step": 505
},
{
"epoch": 0.2740462117141322,
"grad_norm": 2.324402047254847,
"learning_rate": 9.10917046600598e-06,
"loss": 0.5914,
"step": 510
},
{
"epoch": 0.276732939279957,
"grad_norm": 2.4328149953899296,
"learning_rate": 9.082259819234063e-06,
"loss": 0.5871,
"step": 515
},
{
"epoch": 0.27941966684578184,
"grad_norm": 2.326659710915374,
"learning_rate": 9.054989732547507e-06,
"loss": 0.5874,
"step": 520
},
{
"epoch": 0.2821063944116067,
"grad_norm": 2.4409777215420942,
"learning_rate": 9.027362607056986e-06,
"loss": 0.5964,
"step": 525
},
{
"epoch": 0.2847931219774315,
"grad_norm": 2.412955363836415,
"learning_rate": 8.999380875310176e-06,
"loss": 0.5877,
"step": 530
},
{
"epoch": 0.28747984954325634,
"grad_norm": 2.8194077899428516,
"learning_rate": 8.971047001077561e-06,
"loss": 0.5883,
"step": 535
},
{
"epoch": 0.2901665771090811,
"grad_norm": 2.469304660089161,
"learning_rate": 8.942363479135516e-06,
"loss": 0.5801,
"step": 540
},
{
"epoch": 0.29285330467490595,
"grad_norm": 2.3564533376477192,
"learning_rate": 8.913332835046629e-06,
"loss": 0.576,
"step": 545
},
{
"epoch": 0.2955400322407308,
"grad_norm": 2.4491464187209067,
"learning_rate": 8.883957624937333e-06,
"loss": 0.5792,
"step": 550
},
{
"epoch": 0.2982267598065556,
"grad_norm": 2.3159629602291276,
"learning_rate": 8.854240435272842e-06,
"loss": 0.5617,
"step": 555
},
{
"epoch": 0.30091348737238044,
"grad_norm": 2.5317785480271584,
"learning_rate": 8.824183882629411e-06,
"loss": 0.5669,
"step": 560
},
{
"epoch": 0.30360021493820527,
"grad_norm": 2.3852078136194135,
"learning_rate": 8.793790613463956e-06,
"loss": 0.5697,
"step": 565
},
{
"epoch": 0.3062869425040301,
"grad_norm": 2.4921927583022168,
"learning_rate": 8.763063303881021e-06,
"loss": 0.5796,
"step": 570
},
{
"epoch": 0.30897367006985493,
"grad_norm": 9.308745310175,
"learning_rate": 8.73200465939717e-06,
"loss": 0.5735,
"step": 575
},
{
"epoch": 0.31166039763567976,
"grad_norm": 2.4517036683344458,
"learning_rate": 8.700617414702746e-06,
"loss": 0.5671,
"step": 580
},
{
"epoch": 0.31434712520150454,
"grad_norm": 2.2949061651822635,
"learning_rate": 8.668904333421098e-06,
"loss": 0.5696,
"step": 585
},
{
"epoch": 0.31703385276732937,
"grad_norm": 2.2916177476898096,
"learning_rate": 8.636868207865244e-06,
"loss": 0.5756,
"step": 590
},
{
"epoch": 0.3197205803331542,
"grad_norm": 2.5518854172821297,
"learning_rate": 8.604511858792006e-06,
"loss": 0.5622,
"step": 595
},
{
"epoch": 0.32240730789897903,
"grad_norm": 2.4475160043733797,
"learning_rate": 8.571838135153645e-06,
"loss": 0.5525,
"step": 600
},
{
"epoch": 0.32509403546480387,
"grad_norm": 2.3488225651794497,
"learning_rate": 8.538849913847019e-06,
"loss": 0.569,
"step": 605
},
{
"epoch": 0.3277807630306287,
"grad_norm": 2.353785375547004,
"learning_rate": 8.505550099460264e-06,
"loss": 0.5575,
"step": 610
},
{
"epoch": 0.33046749059645353,
"grad_norm": 2.5058455114214166,
"learning_rate": 8.471941624017058e-06,
"loss": 0.5497,
"step": 615
},
{
"epoch": 0.33315421816227836,
"grad_norm": 2.4037582668437674,
"learning_rate": 8.43802744671845e-06,
"loss": 0.5592,
"step": 620
},
{
"epoch": 0.3358409457281032,
"grad_norm": 2.3703138788584206,
"learning_rate": 8.403810553682307e-06,
"loss": 0.55,
"step": 625
},
{
"epoch": 0.338527673293928,
"grad_norm": 2.4422133082637605,
"learning_rate": 8.369293957680397e-06,
"loss": 0.5451,
"step": 630
},
{
"epoch": 0.3412144008597528,
"grad_norm": 2.372256863029062,
"learning_rate": 8.334480697873101e-06,
"loss": 0.5616,
"step": 635
},
{
"epoch": 0.34390112842557763,
"grad_norm": 2.30267735280899,
"learning_rate": 8.299373839541829e-06,
"loss": 0.547,
"step": 640
},
{
"epoch": 0.34658785599140246,
"grad_norm": 2.312324577995851,
"learning_rate": 8.26397647381912e-06,
"loss": 0.5406,
"step": 645
},
{
"epoch": 0.3492745835572273,
"grad_norm": 2.4374915255270175,
"learning_rate": 8.228291717416472e-06,
"loss": 0.5388,
"step": 650
},
{
"epoch": 0.3519613111230521,
"grad_norm": 2.3871133470130674,
"learning_rate": 8.192322712349917e-06,
"loss": 0.5432,
"step": 655
},
{
"epoch": 0.35464803868887695,
"grad_norm": 2.519969391683205,
"learning_rate": 8.15607262566337e-06,
"loss": 0.5348,
"step": 660
},
{
"epoch": 0.3573347662547018,
"grad_norm": 2.4866718815954623,
"learning_rate": 8.119544649149762e-06,
"loss": 0.5567,
"step": 665
},
{
"epoch": 0.3600214938205266,
"grad_norm": 2.401099368264956,
"learning_rate": 8.08274199907003e-06,
"loss": 0.5409,
"step": 670
},
{
"epoch": 0.36270822138635145,
"grad_norm": 2.320605485178993,
"learning_rate": 8.0456679158699e-06,
"loss": 0.5283,
"step": 675
},
{
"epoch": 0.3653949489521762,
"grad_norm": 2.3005736041616642,
"learning_rate": 8.008325663894586e-06,
"loss": 0.5368,
"step": 680
},
{
"epoch": 0.36808167651800106,
"grad_norm": 2.305647834819274,
"learning_rate": 7.970718531101365e-06,
"loss": 0.5398,
"step": 685
},
{
"epoch": 0.3707684040838259,
"grad_norm": 2.3268228923372263,
"learning_rate": 7.932849828770062e-06,
"loss": 0.5348,
"step": 690
},
{
"epoch": 0.3734551316496507,
"grad_norm": 2.2813438553059053,
"learning_rate": 7.89472289121151e-06,
"loss": 0.5602,
"step": 695
},
{
"epoch": 0.37614185921547555,
"grad_norm": 2.2064861994852816,
"learning_rate": 7.856341075473963e-06,
"loss": 0.5227,
"step": 700
},
{
"epoch": 0.3788285867813004,
"grad_norm": 2.385886752618868,
"learning_rate": 7.817707761047498e-06,
"loss": 0.5292,
"step": 705
},
{
"epoch": 0.3815153143471252,
"grad_norm": 2.3717077533920867,
"learning_rate": 7.77882634956647e-06,
"loss": 0.5332,
"step": 710
},
{
"epoch": 0.38420204191295004,
"grad_norm": 2.389159454524817,
"learning_rate": 7.739700264509993e-06,
"loss": 0.5236,
"step": 715
},
{
"epoch": 0.3868887694787749,
"grad_norm": 2.314814719990629,
"learning_rate": 7.700332950900504e-06,
"loss": 0.5217,
"step": 720
},
{
"epoch": 0.38957549704459965,
"grad_norm": 2.207891685389126,
"learning_rate": 7.660727875000432e-06,
"loss": 0.5078,
"step": 725
},
{
"epoch": 0.3922622246104245,
"grad_norm": 2.3264819673930432,
"learning_rate": 7.6208885240069995e-06,
"loss": 0.5267,
"step": 730
},
{
"epoch": 0.3949489521762493,
"grad_norm": 2.288819911752258,
"learning_rate": 7.5808184057451765e-06,
"loss": 0.5067,
"step": 735
},
{
"epoch": 0.39763567974207414,
"grad_norm": 2.3653593555899857,
"learning_rate": 7.540521048358814e-06,
"loss": 0.5344,
"step": 740
},
{
"epoch": 0.400322407307899,
"grad_norm": 2.4648359924535743,
"learning_rate": 7.500000000000001e-06,
"loss": 0.5227,
"step": 745
},
{
"epoch": 0.4030091348737238,
"grad_norm": 2.409473996165666,
"learning_rate": 7.459258828516645e-06,
"loss": 0.5096,
"step": 750
},
{
"epoch": 0.40569586243954864,
"grad_norm": 2.3668323179746236,
"learning_rate": 7.418301121138335e-06,
"loss": 0.5171,
"step": 755
},
{
"epoch": 0.40838259000537347,
"grad_norm": 2.4782926067309825,
"learning_rate": 7.3771304841604764e-06,
"loss": 0.51,
"step": 760
},
{
"epoch": 0.4110693175711983,
"grad_norm": 2.267794928141394,
"learning_rate": 7.335750542626772e-06,
"loss": 0.5245,
"step": 765
},
{
"epoch": 0.41375604513702313,
"grad_norm": 2.318079683575523,
"learning_rate": 7.294164940010031e-06,
"loss": 0.5124,
"step": 770
},
{
"epoch": 0.4164427727028479,
"grad_norm": 2.1909268149999988,
"learning_rate": 7.2523773378913655e-06,
"loss": 0.5168,
"step": 775
},
{
"epoch": 0.41912950026867274,
"grad_norm": 2.2456539972858773,
"learning_rate": 7.210391415637797e-06,
"loss": 0.4946,
"step": 780
},
{
"epoch": 0.42181622783449757,
"grad_norm": 2.4974603199079133,
"learning_rate": 7.168210870078277e-06,
"loss": 0.5064,
"step": 785
},
{
"epoch": 0.4245029554003224,
"grad_norm": 2.334444583921718,
"learning_rate": 7.125839415178204e-06,
"loss": 0.5116,
"step": 790
},
{
"epoch": 0.42718968296614723,
"grad_norm": 2.397330148256394,
"learning_rate": 7.083280781712394e-06,
"loss": 0.5081,
"step": 795
},
{
"epoch": 0.42987641053197206,
"grad_norm": 2.5416270893996273,
"learning_rate": 7.0405387169365965e-06,
"loss": 0.499,
"step": 800
},
{
"epoch": 0.4325631380977969,
"grad_norm": 2.3363347223952657,
"learning_rate": 6.9976169842575526e-06,
"loss": 0.5049,
"step": 805
},
{
"epoch": 0.4352498656636217,
"grad_norm": 2.296216857638828,
"learning_rate": 6.9545193629016215e-06,
"loss": 0.5168,
"step": 810
},
{
"epoch": 0.43793659322944656,
"grad_norm": 2.4900891751826237,
"learning_rate": 6.911249647582036e-06,
"loss": 0.4939,
"step": 815
},
{
"epoch": 0.44062332079527133,
"grad_norm": 2.3708479871646797,
"learning_rate": 6.867811648164769e-06,
"loss": 0.4968,
"step": 820
},
{
"epoch": 0.44331004836109617,
"grad_norm": 2.2942739105778007,
"learning_rate": 6.824209189333082e-06,
"loss": 0.4892,
"step": 825
},
{
"epoch": 0.445996775926921,
"grad_norm": 2.298491349290504,
"learning_rate": 6.780446110250766e-06,
"loss": 0.5064,
"step": 830
},
{
"epoch": 0.4486835034927458,
"grad_norm": 2.3571392373027624,
"learning_rate": 6.736526264224101e-06,
"loss": 0.5005,
"step": 835
},
{
"epoch": 0.45137023105857066,
"grad_norm": 2.459885767558715,
"learning_rate": 6.692453518362587e-06,
"loss": 0.4951,
"step": 840
},
{
"epoch": 0.4540569586243955,
"grad_norm": 2.411449421942393,
"learning_rate": 6.648231753238431e-06,
"loss": 0.4828,
"step": 845
},
{
"epoch": 0.4567436861902203,
"grad_norm": 2.404363576863236,
"learning_rate": 6.603864862544879e-06,
"loss": 0.493,
"step": 850
},
{
"epoch": 0.45943041375604515,
"grad_norm": 2.2737487815721478,
"learning_rate": 6.5593567527533715e-06,
"loss": 0.4744,
"step": 855
},
{
"epoch": 0.46211714132187,
"grad_norm": 2.3678978985955186,
"learning_rate": 6.514711342769588e-06,
"loss": 0.5012,
"step": 860
},
{
"epoch": 0.4648038688876948,
"grad_norm": 2.356303245264604,
"learning_rate": 6.469932563588386e-06,
"loss": 0.4973,
"step": 865
},
{
"epoch": 0.4674905964535196,
"grad_norm": 2.7020262831984527,
"learning_rate": 6.425024357947677e-06,
"loss": 0.4918,
"step": 870
},
{
"epoch": 0.4701773240193444,
"grad_norm": 2.3340485214143745,
"learning_rate": 6.3799906799812805e-06,
"loss": 0.4982,
"step": 875
},
{
"epoch": 0.47286405158516925,
"grad_norm": 2.4191389152178373,
"learning_rate": 6.334835494870759e-06,
"loss": 0.4751,
"step": 880
},
{
"epoch": 0.4755507791509941,
"grad_norm": 2.272119239990629,
"learning_rate": 6.289562778496285e-06,
"loss": 0.4872,
"step": 885
},
{
"epoch": 0.4782375067168189,
"grad_norm": 2.300477262266863,
"learning_rate": 6.244176517086573e-06,
"loss": 0.4821,
"step": 890
},
{
"epoch": 0.48092423428264375,
"grad_norm": 2.446204046472319,
"learning_rate": 6.1986807068678926e-06,
"loss": 0.4823,
"step": 895
},
{
"epoch": 0.4836109618484686,
"grad_norm": 2.290872783259082,
"learning_rate": 6.153079353712201e-06,
"loss": 0.4683,
"step": 900
},
{
"epoch": 0.4862976894142934,
"grad_norm": 2.311264668993125,
"learning_rate": 6.107376472784438e-06,
"loss": 0.4759,
"step": 905
},
{
"epoch": 0.48898441698011824,
"grad_norm": 2.1766053144740805,
"learning_rate": 6.061576088188981e-06,
"loss": 0.4541,
"step": 910
},
{
"epoch": 0.491671144545943,
"grad_norm": 2.2863598186899563,
"learning_rate": 6.015682232615336e-06,
"loss": 0.4751,
"step": 915
},
{
"epoch": 0.49435787211176785,
"grad_norm": 2.3157344313618453,
"learning_rate": 5.969698946983055e-06,
"loss": 0.4579,
"step": 920
},
{
"epoch": 0.4970445996775927,
"grad_norm": 2.4556667211388308,
"learning_rate": 5.923630280085948e-06,
"loss": 0.4667,
"step": 925
},
{
"epoch": 0.4997313272434175,
"grad_norm": 2.2693647056660575,
"learning_rate": 5.877480288235569e-06,
"loss": 0.4642,
"step": 930
},
{
"epoch": 0.5024180548092424,
"grad_norm": 2.299235513307737,
"learning_rate": 5.831253034904083e-06,
"loss": 0.4625,
"step": 935
},
{
"epoch": 0.5051047823750672,
"grad_norm": 2.2594758653976603,
"learning_rate": 5.7849525903664636e-06,
"loss": 0.4741,
"step": 940
},
{
"epoch": 0.507791509940892,
"grad_norm": 2.346200404836091,
"learning_rate": 5.738583031342123e-06,
"loss": 0.4533,
"step": 945
},
{
"epoch": 0.5104782375067168,
"grad_norm": 2.198398726512657,
"learning_rate": 5.692148440635946e-06,
"loss": 0.4561,
"step": 950
},
{
"epoch": 0.5131649650725416,
"grad_norm": 2.3681691462263466,
"learning_rate": 5.645652906778808e-06,
"loss": 0.4621,
"step": 955
},
{
"epoch": 0.5158516926383665,
"grad_norm": 2.344944047074419,
"learning_rate": 5.599100523667586e-06,
"loss": 0.4599,
"step": 960
},
{
"epoch": 0.5185384202041913,
"grad_norm": 2.2092289217419085,
"learning_rate": 5.552495390204691e-06,
"loss": 0.4722,
"step": 965
},
{
"epoch": 0.5212251477700162,
"grad_norm": 2.3297576895231606,
"learning_rate": 5.505841609937162e-06,
"loss": 0.4532,
"step": 970
},
{
"epoch": 0.5239118753358409,
"grad_norm": 2.267324308428592,
"learning_rate": 5.4591432906953515e-06,
"loss": 0.4527,
"step": 975
},
{
"epoch": 0.5265986029016658,
"grad_norm": 2.4550058840280884,
"learning_rate": 5.412404544231235e-06,
"loss": 0.4562,
"step": 980
},
{
"epoch": 0.5292853304674906,
"grad_norm": 2.268535616599798,
"learning_rate": 5.365629485856381e-06,
"loss": 0.4454,
"step": 985
},
{
"epoch": 0.5319720580333154,
"grad_norm": 2.598323262451839,
"learning_rate": 5.318822234079584e-06,
"loss": 0.4614,
"step": 990
},
{
"epoch": 0.5346587855991403,
"grad_norm": 6.381482924500483,
"learning_rate": 5.271986910244254e-06,
"loss": 0.4366,
"step": 995
},
{
"epoch": 0.537345513164965,
"grad_norm": 2.359236695134924,
"learning_rate": 5.225127638165514e-06,
"loss": 0.4535,
"step": 1000
},
{
"epoch": 0.5400322407307899,
"grad_norm": 2.268448533208311,
"learning_rate": 5.178248543767122e-06,
"loss": 0.4342,
"step": 1005
},
{
"epoch": 0.5427189682966147,
"grad_norm": 2.358403761646346,
"learning_rate": 5.1313537547181716e-06,
"loss": 0.4405,
"step": 1010
},
{
"epoch": 0.5454056958624396,
"grad_norm": 2.2710106949262805,
"learning_rate": 5.084447400069656e-06,
"loss": 0.4419,
"step": 1015
},
{
"epoch": 0.5480924234282644,
"grad_norm": 2.368278941060436,
"learning_rate": 5.037533609890917e-06,
"loss": 0.4364,
"step": 1020
},
{
"epoch": 0.5507791509940893,
"grad_norm": 2.284175829190933,
"learning_rate": 4.990616514905982e-06,
"loss": 0.4452,
"step": 1025
},
{
"epoch": 0.553465878559914,
"grad_norm": 2.41532321709073,
"learning_rate": 4.943700246129871e-06,
"loss": 0.436,
"step": 1030
},
{
"epoch": 0.5561526061257388,
"grad_norm": 2.3211025544242787,
"learning_rate": 4.896788934504853e-06,
"loss": 0.4467,
"step": 1035
},
{
"epoch": 0.5588393336915637,
"grad_norm": 2.347444291429596,
"learning_rate": 4.849886710536725e-06,
"loss": 0.4288,
"step": 1040
},
{
"epoch": 0.5615260612573885,
"grad_norm": 2.4127644606277014,
"learning_rate": 4.802997703931124e-06,
"loss": 0.4355,
"step": 1045
},
{
"epoch": 0.5642127888232134,
"grad_norm": 2.4325621488004776,
"learning_rate": 4.7561260432299015e-06,
"loss": 0.4551,
"step": 1050
},
{
"epoch": 0.5668995163890381,
"grad_norm": 2.242665015732091,
"learning_rate": 4.7092758554476215e-06,
"loss": 0.4364,
"step": 1055
},
{
"epoch": 0.569586243954863,
"grad_norm": 2.4289606347980692,
"learning_rate": 4.662451265708174e-06,
"loss": 0.4316,
"step": 1060
},
{
"epoch": 0.5722729715206878,
"grad_norm": 2.2828904861500257,
"learning_rate": 4.6156563968815575e-06,
"loss": 0.4298,
"step": 1065
},
{
"epoch": 0.5749596990865127,
"grad_norm": 2.320278023559682,
"learning_rate": 4.568895369220868e-06,
"loss": 0.4332,
"step": 1070
},
{
"epoch": 0.5776464266523375,
"grad_norm": 2.228672536344676,
"learning_rate": 4.52217229999951e-06,
"loss": 0.4291,
"step": 1075
},
{
"epoch": 0.5803331542181622,
"grad_norm": 2.2818442860821664,
"learning_rate": 4.47549130314868e-06,
"loss": 0.4254,
"step": 1080
},
{
"epoch": 0.5830198817839871,
"grad_norm": 2.218722467460458,
"learning_rate": 4.428856488895128e-06,
"loss": 0.441,
"step": 1085
},
{
"epoch": 0.5857066093498119,
"grad_norm": 2.2852430401184907,
"learning_rate": 4.382271963399268e-06,
"loss": 0.4201,
"step": 1090
},
{
"epoch": 0.5883933369156368,
"grad_norm": 2.2551743568535416,
"learning_rate": 4.33574182839362e-06,
"loss": 0.4284,
"step": 1095
},
{
"epoch": 0.5910800644814616,
"grad_norm": 2.161938070818427,
"learning_rate": 4.28927018082167e-06,
"loss": 0.4274,
"step": 1100
},
{
"epoch": 0.5937667920472864,
"grad_norm": 2.4061344797719753,
"learning_rate": 4.2428611124771184e-06,
"loss": 0.4141,
"step": 1105
},
{
"epoch": 0.5964535196131112,
"grad_norm": 2.325443484824538,
"learning_rate": 4.19651870964362e-06,
"loss": 0.4286,
"step": 1110
},
{
"epoch": 0.5991402471789361,
"grad_norm": 2.3440359985504133,
"learning_rate": 4.150247052734979e-06,
"loss": 0.4302,
"step": 1115
},
{
"epoch": 0.6018269747447609,
"grad_norm": 2.4615226071875567,
"learning_rate": 4.104050215935875e-06,
"loss": 0.4111,
"step": 1120
},
{
"epoch": 0.6045137023105857,
"grad_norm": 2.4429242324006983,
"learning_rate": 4.0579322668431295e-06,
"loss": 0.4193,
"step": 1125
},
{
"epoch": 0.6072004298764105,
"grad_norm": 2.3015769237979677,
"learning_rate": 4.011897266107567e-06,
"loss": 0.42,
"step": 1130
},
{
"epoch": 0.6098871574422353,
"grad_norm": 2.1719410742796,
"learning_rate": 3.965949267076465e-06,
"loss": 0.4027,
"step": 1135
},
{
"epoch": 0.6125738850080602,
"grad_norm": 2.192156669745234,
"learning_rate": 3.9200923154366685e-06,
"loss": 0.423,
"step": 1140
},
{
"epoch": 0.615260612573885,
"grad_norm": 2.2811472307718414,
"learning_rate": 3.874330448858369e-06,
"loss": 0.4142,
"step": 1145
},
{
"epoch": 0.6179473401397099,
"grad_norm": 2.116499798174077,
"learning_rate": 3.8286676966395895e-06,
"loss": 0.4107,
"step": 1150
},
{
"epoch": 0.6206340677055346,
"grad_norm": 2.244666553521093,
"learning_rate": 3.7831080793514065e-06,
"loss": 0.4017,
"step": 1155
},
{
"epoch": 0.6233207952713595,
"grad_norm": 2.4675679743826295,
"learning_rate": 3.7376556084839465e-06,
"loss": 0.4059,
"step": 1160
},
{
"epoch": 0.6260075228371843,
"grad_norm": 2.443117409755618,
"learning_rate": 3.692314286093167e-06,
"loss": 0.4162,
"step": 1165
},
{
"epoch": 0.6286942504030091,
"grad_norm": 2.2029356738105363,
"learning_rate": 3.647088104448494e-06,
"loss": 0.4045,
"step": 1170
},
{
"epoch": 0.631380977968834,
"grad_norm": 2.2250689532361045,
"learning_rate": 3.601981045681292e-06,
"loss": 0.4035,
"step": 1175
},
{
"epoch": 0.6340677055346587,
"grad_norm": 2.3017262536884395,
"learning_rate": 3.556997081434248e-06,
"loss": 0.4043,
"step": 1180
},
{
"epoch": 0.6367544331004836,
"grad_norm": 2.242688700397434,
"learning_rate": 3.5121401725116653e-06,
"loss": 0.405,
"step": 1185
},
{
"epoch": 0.6394411606663084,
"grad_norm": 2.0746209840311747,
"learning_rate": 3.4674142685307264e-06,
"loss": 0.4086,
"step": 1190
},
{
"epoch": 0.6421278882321333,
"grad_norm": 2.2459358965322993,
"learning_rate": 3.4228233075737225e-06,
"loss": 0.4121,
"step": 1195
},
{
"epoch": 0.6448146157979581,
"grad_norm": 2.1930816457650235,
"learning_rate": 3.3783712158413163e-06,
"loss": 0.395,
"step": 1200
},
{
"epoch": 0.647501343363783,
"grad_norm": 2.0405182523892145,
"learning_rate": 3.3340619073068347e-06,
"loss": 0.3865,
"step": 1205
},
{
"epoch": 0.6501880709296077,
"grad_norm": 2.280921131796164,
"learning_rate": 3.289899283371657e-06,
"loss": 0.3905,
"step": 1210
},
{
"epoch": 0.6528747984954326,
"grad_norm": 2.297801391720408,
"learning_rate": 3.2458872325216893e-06,
"loss": 0.3992,
"step": 1215
},
{
"epoch": 0.6555615260612574,
"grad_norm": 2.2255966269222363,
"learning_rate": 3.202029629984991e-06,
"loss": 0.4012,
"step": 1220
},
{
"epoch": 0.6582482536270822,
"grad_norm": 2.159969955127635,
"learning_rate": 3.158330337390565e-06,
"loss": 0.4008,
"step": 1225
},
{
"epoch": 0.6609349811929071,
"grad_norm": 2.2717513219011636,
"learning_rate": 3.1147932024283424e-06,
"loss": 0.3915,
"step": 1230
},
{
"epoch": 0.6636217087587318,
"grad_norm": 2.2223909448906247,
"learning_rate": 3.071422058510394e-06,
"loss": 0.3918,
"step": 1235
},
{
"epoch": 0.6663084363245567,
"grad_norm": 2.3054337829096294,
"learning_rate": 3.0282207244334084e-06,
"loss": 0.4009,
"step": 1240
},
{
"epoch": 0.6689951638903815,
"grad_norm": 2.2290900583766677,
"learning_rate": 2.9851930040424383e-06,
"loss": 0.4068,
"step": 1245
},
{
"epoch": 0.6716818914562064,
"grad_norm": 2.1757281052534303,
"learning_rate": 2.9423426858959892e-06,
"loss": 0.3882,
"step": 1250
},
{
"epoch": 0.6743686190220312,
"grad_norm": 2.1885464432673314,
"learning_rate": 2.8996735429324256e-06,
"loss": 0.389,
"step": 1255
},
{
"epoch": 0.677055346587856,
"grad_norm": 2.0896855258083464,
"learning_rate": 2.8571893321377773e-06,
"loss": 0.3856,
"step": 1260
},
{
"epoch": 0.6797420741536808,
"grad_norm": 2.313353541234272,
"learning_rate": 2.8148937942149347e-06,
"loss": 0.392,
"step": 1265
},
{
"epoch": 0.6824288017195056,
"grad_norm": 2.3338524034127346,
"learning_rate": 2.7727906532542783e-06,
"loss": 0.3869,
"step": 1270
},
{
"epoch": 0.6851155292853305,
"grad_norm": 2.084691037407018,
"learning_rate": 2.7308836164057913e-06,
"loss": 0.3826,
"step": 1275
},
{
"epoch": 0.6878022568511553,
"grad_norm": 2.324796342452183,
"learning_rate": 2.6891763735526223e-06,
"loss": 0.3871,
"step": 1280
},
{
"epoch": 0.6904889844169801,
"grad_norm": 2.160101574703143,
"learning_rate": 2.6476725969862227e-06,
"loss": 0.3728,
"step": 1285
},
{
"epoch": 0.6931757119828049,
"grad_norm": 2.067721186262042,
"learning_rate": 2.6063759410829813e-06,
"loss": 0.3811,
"step": 1290
},
{
"epoch": 0.6958624395486298,
"grad_norm": 2.4251135654528646,
"learning_rate": 2.565290041982471e-06,
"loss": 0.3884,
"step": 1295
},
{
"epoch": 0.6985491671144546,
"grad_norm": 2.1893196908992123,
"learning_rate": 2.524418517267283e-06,
"loss": 0.3915,
"step": 1300
},
{
"epoch": 0.7012358946802795,
"grad_norm": 2.1923809598940602,
"learning_rate": 2.4837649656445117e-06,
"loss": 0.3853,
"step": 1305
},
{
"epoch": 0.7039226222461042,
"grad_norm": 2.2626797878823712,
"learning_rate": 2.4433329666288774e-06,
"loss": 0.3771,
"step": 1310
},
{
"epoch": 0.706609349811929,
"grad_norm": 2.208206845682125,
"learning_rate": 2.4031260802275623e-06,
"loss": 0.3689,
"step": 1315
},
{
"epoch": 0.7092960773777539,
"grad_norm": 2.18574695336372,
"learning_rate": 2.3631478466267498e-06,
"loss": 0.3728,
"step": 1320
},
{
"epoch": 0.7119828049435787,
"grad_norm": 2.207451564888781,
"learning_rate": 2.323401785879921e-06,
"loss": 0.3758,
"step": 1325
},
{
"epoch": 0.7146695325094036,
"grad_norm": 2.168655924857549,
"learning_rate": 2.283891397597908e-06,
"loss": 0.3672,
"step": 1330
},
{
"epoch": 0.7173562600752283,
"grad_norm": 2.255193862061058,
"learning_rate": 2.2446201606407537e-06,
"loss": 0.3794,
"step": 1335
},
{
"epoch": 0.7200429876410532,
"grad_norm": 2.2636128693117166,
"learning_rate": 2.205591532811416e-06,
"loss": 0.3742,
"step": 1340
},
{
"epoch": 0.722729715206878,
"grad_norm": 2.1328893959069117,
"learning_rate": 2.166808950551296e-06,
"loss": 0.3778,
"step": 1345
},
{
"epoch": 0.7254164427727029,
"grad_norm": 2.1602209025177705,
"learning_rate": 2.128275828637664e-06,
"loss": 0.3788,
"step": 1350
},
{
"epoch": 0.7281031703385277,
"grad_norm": 2.28424029482692,
"learning_rate": 2.089995559883004e-06,
"loss": 0.371,
"step": 1355
},
{
"epoch": 0.7307898979043524,
"grad_norm": 2.15439710916817,
"learning_rate": 2.0519715148362585e-06,
"loss": 0.3724,
"step": 1360
},
{
"epoch": 0.7334766254701773,
"grad_norm": 2.1370354226282355,
"learning_rate": 2.0142070414860704e-06,
"loss": 0.3551,
"step": 1365
},
{
"epoch": 0.7361633530360021,
"grad_norm": 2.1423156686501836,
"learning_rate": 1.976705464965985e-06,
"loss": 0.3817,
"step": 1370
},
{
"epoch": 0.738850080601827,
"grad_norm": 2.1767372641376976,
"learning_rate": 1.9394700872616856e-06,
"loss": 0.3692,
"step": 1375
},
{
"epoch": 0.7415368081676518,
"grad_norm": 2.1148705992310552,
"learning_rate": 1.902504186920245e-06,
"loss": 0.3701,
"step": 1380
},
{
"epoch": 0.7442235357334767,
"grad_norm": 2.127192293566224,
"learning_rate": 1.8658110187614538e-06,
"loss": 0.3716,
"step": 1385
},
{
"epoch": 0.7469102632993014,
"grad_norm": 2.1527774862898394,
"learning_rate": 1.8293938135912475e-06,
"loss": 0.3663,
"step": 1390
},
{
"epoch": 0.7495969908651263,
"grad_norm": 2.3762653649020553,
"learning_rate": 1.793255777917217e-06,
"loss": 0.356,
"step": 1395
},
{
"epoch": 0.7522837184309511,
"grad_norm": 2.0672220526784444,
"learning_rate": 1.7574000936662928e-06,
"loss": 0.3666,
"step": 1400
},
{
"epoch": 0.7549704459967759,
"grad_norm": 2.131662760991029,
"learning_rate": 1.7218299179045789e-06,
"loss": 0.3777,
"step": 1405
},
{
"epoch": 0.7576571735626008,
"grad_norm": 2.3447659932151423,
"learning_rate": 1.6865483825593643e-06,
"loss": 0.3718,
"step": 1410
},
{
"epoch": 0.7603439011284255,
"grad_norm": 2.393514042224357,
"learning_rate": 1.6515585941433694e-06,
"loss": 0.3642,
"step": 1415
},
{
"epoch": 0.7630306286942504,
"grad_norm": 2.3166220749115194,
"learning_rate": 1.6168636334812126e-06,
"loss": 0.3712,
"step": 1420
},
{
"epoch": 0.7657173562600752,
"grad_norm": 2.1832693900107745,
"learning_rate": 1.5824665554381579e-06,
"loss": 0.3658,
"step": 1425
},
{
"epoch": 0.7684040838259001,
"grad_norm": 2.0779160960958594,
"learning_rate": 1.5483703886511191e-06,
"loss": 0.3615,
"step": 1430
},
{
"epoch": 0.7710908113917249,
"grad_norm": 2.098593414629146,
"learning_rate": 1.5145781352620054e-06,
"loss": 0.353,
"step": 1435
},
{
"epoch": 0.7737775389575497,
"grad_norm": 2.1861074294439224,
"learning_rate": 1.481092770653374e-06,
"loss": 0.3599,
"step": 1440
},
{
"epoch": 0.7764642665233745,
"grad_norm": 2.2448007733369923,
"learning_rate": 1.4479172431864647e-06,
"loss": 0.3548,
"step": 1445
},
{
"epoch": 0.7791509940891993,
"grad_norm": 2.307957775652367,
"learning_rate": 1.4150544739415755e-06,
"loss": 0.3549,
"step": 1450
},
{
"epoch": 0.7818377216550242,
"grad_norm": 2.1872047936836903,
"learning_rate": 1.382507356460891e-06,
"loss": 0.3533,
"step": 1455
},
{
"epoch": 0.784524449220849,
"grad_norm": 2.1557683435766304,
"learning_rate": 1.3502787564936875e-06,
"loss": 0.3678,
"step": 1460
},
{
"epoch": 0.7872111767866738,
"grad_norm": 2.088183401090359,
"learning_rate": 1.3183715117440143e-06,
"loss": 0.3452,
"step": 1465
},
{
"epoch": 0.7898979043524986,
"grad_norm": 2.1559096637691195,
"learning_rate": 1.2867884316208345e-06,
"loss": 0.3605,
"step": 1470
},
{
"epoch": 0.7925846319183235,
"grad_norm": 1.8737663503332451,
"learning_rate": 1.255532296990662e-06,
"loss": 0.3514,
"step": 1475
},
{
"epoch": 0.7952713594841483,
"grad_norm": 2.196223751578153,
"learning_rate": 1.2246058599327021e-06,
"loss": 0.3511,
"step": 1480
},
{
"epoch": 0.7979580870499732,
"grad_norm": 2.0822224737026294,
"learning_rate": 1.194011843496537e-06,
"loss": 0.3653,
"step": 1485
},
{
"epoch": 0.800644814615798,
"grad_norm": 2.2216664507383457,
"learning_rate": 1.163752941462362e-06,
"loss": 0.3491,
"step": 1490
},
{
"epoch": 0.8033315421816228,
"grad_norm": 2.259893004106475,
"learning_rate": 1.1338318181038037e-06,
"loss": 0.3495,
"step": 1495
},
{
"epoch": 0.8060182697474476,
"grad_norm": 2.1666650973142687,
"learning_rate": 1.1042511079533275e-06,
"loss": 0.3611,
"step": 1500
},
{
"epoch": 0.8087049973132724,
"grad_norm": 2.2659364719554898,
"learning_rate": 1.0750134155702674e-06,
"loss": 0.3626,
"step": 1505
},
{
"epoch": 0.8113917248790973,
"grad_norm": 2.108890981344199,
"learning_rate": 1.046121315311508e-06,
"loss": 0.3522,
"step": 1510
},
{
"epoch": 0.814078452444922,
"grad_norm": 1.9553754507307322,
"learning_rate": 1.017577351104801e-06,
"loss": 0.3491,
"step": 1515
},
{
"epoch": 0.8167651800107469,
"grad_norm": 2.042354110902273,
"learning_rate": 9.893840362247809e-07,
"loss": 0.3489,
"step": 1520
},
{
"epoch": 0.8194519075765717,
"grad_norm": 2.1701934651234613,
"learning_rate": 9.615438530716753e-07,
"loss": 0.3556,
"step": 1525
},
{
"epoch": 0.8221386351423966,
"grad_norm": 2.298551493046633,
"learning_rate": 9.340592529527237e-07,
"loss": 0.3533,
"step": 1530
},
{
"epoch": 0.8248253627082214,
"grad_norm": 2.0661919944679656,
"learning_rate": 9.069326558663488e-07,
"loss": 0.3504,
"step": 1535
},
{
"epoch": 0.8275120902740463,
"grad_norm": 2.238182054477219,
"learning_rate": 8.801664502890722e-07,
"loss": 0.3583,
"step": 1540
},
{
"epoch": 0.830198817839871,
"grad_norm": 2.206073109503085,
"learning_rate": 8.537629929652186e-07,
"loss": 0.3493,
"step": 1545
},
{
"epoch": 0.8328855454056958,
"grad_norm": 2.112920269926838,
"learning_rate": 8.277246086993962e-07,
"loss": 0.3485,
"step": 1550
},
{
"epoch": 0.8355722729715207,
"grad_norm": 2.1091946693423824,
"learning_rate": 8.02053590151805e-07,
"loss": 0.3602,
"step": 1555
},
{
"epoch": 0.8382590005373455,
"grad_norm": 2.0674911667909592,
"learning_rate": 7.767521976363735e-07,
"loss": 0.344,
"step": 1560
},
{
"epoch": 0.8409457281031704,
"grad_norm": 2.094630956022192,
"learning_rate": 7.518226589217286e-07,
"loss": 0.3555,
"step": 1565
},
{
"epoch": 0.8436324556689951,
"grad_norm": 1.9871731688863412,
"learning_rate": 7.27267169035053e-07,
"loss": 0.3446,
"step": 1570
},
{
"epoch": 0.84631918323482,
"grad_norm": 1.9968082439337067,
"learning_rate": 7.030878900688115e-07,
"loss": 0.3453,
"step": 1575
},
{
"epoch": 0.8490059108006448,
"grad_norm": 2.0534061294688435,
"learning_rate": 6.792869509903777e-07,
"loss": 0.3471,
"step": 1580
},
{
"epoch": 0.8516926383664697,
"grad_norm": 2.06210125589499,
"learning_rate": 6.558664474545817e-07,
"loss": 0.3395,
"step": 1585
},
{
"epoch": 0.8543793659322945,
"grad_norm": 1.9567313896707874,
"learning_rate": 6.328284416191893e-07,
"loss": 0.3401,
"step": 1590
},
{
"epoch": 0.8570660934981192,
"grad_norm": 2.060413092979503,
"learning_rate": 6.101749619633346e-07,
"loss": 0.3394,
"step": 1595
},
{
"epoch": 0.8597528210639441,
"grad_norm": 2.1887059434963603,
"learning_rate": 5.879080031089047e-07,
"loss": 0.3543,
"step": 1600
},
{
"epoch": 0.8624395486297689,
"grad_norm": 1.9845082909122282,
"learning_rate": 5.660295256449233e-07,
"loss": 0.3389,
"step": 1605
},
{
"epoch": 0.8651262761955938,
"grad_norm": 2.3356240543268414,
"learning_rate": 5.445414559549167e-07,
"loss": 0.3403,
"step": 1610
},
{
"epoch": 0.8678130037614186,
"grad_norm": 2.2880237510434123,
"learning_rate": 5.234456860473042e-07,
"loss": 0.333,
"step": 1615
},
{
"epoch": 0.8704997313272435,
"grad_norm": 1.987792837037979,
"learning_rate": 5.027440733887973e-07,
"loss": 0.3413,
"step": 1620
},
{
"epoch": 0.8731864588930682,
"grad_norm": 2.145351742840335,
"learning_rate": 4.824384407408622e-07,
"loss": 0.3419,
"step": 1625
},
{
"epoch": 0.8758731864588931,
"grad_norm": 2.0454741447896034,
"learning_rate": 4.625305759992205e-07,
"loss": 0.3446,
"step": 1630
},
{
"epoch": 0.8785599140247179,
"grad_norm": 2.3653559873352754,
"learning_rate": 4.4302223203642803e-07,
"loss": 0.3456,
"step": 1635
},
{
"epoch": 0.8812466415905427,
"grad_norm": 1.9465586799987966,
"learning_rate": 4.2391512654753443e-07,
"loss": 0.3401,
"step": 1640
},
{
"epoch": 0.8839333691563676,
"grad_norm": 2.0641483788200166,
"learning_rate": 4.05210941898847e-07,
"loss": 0.3444,
"step": 1645
},
{
"epoch": 0.8866200967221923,
"grad_norm": 1.9634013461126347,
"learning_rate": 3.8691132497979064e-07,
"loss": 0.3262,
"step": 1650
},
{
"epoch": 0.8893068242880172,
"grad_norm": 1.9771681166485953,
"learning_rate": 3.6901788705790753e-07,
"loss": 0.3339,
"step": 1655
},
{
"epoch": 0.891993551853842,
"grad_norm": 2.0517920199325363,
"learning_rate": 3.5153220363698225e-07,
"loss": 0.3505,
"step": 1660
},
{
"epoch": 0.8946802794196669,
"grad_norm": 1.9243621922759124,
"learning_rate": 3.344558143183246e-07,
"loss": 0.342,
"step": 1665
},
{
"epoch": 0.8973670069854917,
"grad_norm": 1.9232396735880564,
"learning_rate": 3.1779022266520245e-07,
"loss": 0.347,
"step": 1670
},
{
"epoch": 0.9000537345513165,
"grad_norm": 2.0661817058830305,
"learning_rate": 3.015368960704584e-07,
"loss": 0.3293,
"step": 1675
},
{
"epoch": 0.9027404621171413,
"grad_norm": 2.1100704132509094,
"learning_rate": 2.856972656273066e-07,
"loss": 0.3331,
"step": 1680
},
{
"epoch": 0.9054271896829661,
"grad_norm": 2.0718216201058444,
"learning_rate": 2.7027272600332443e-07,
"loss": 0.3422,
"step": 1685
},
{
"epoch": 0.908113917248791,
"grad_norm": 2.1084735618419246,
"learning_rate": 2.5526463531765467e-07,
"loss": 0.3453,
"step": 1690
},
{
"epoch": 0.9108006448146158,
"grad_norm": 1.8503057057006569,
"learning_rate": 2.4067431502142414e-07,
"loss": 0.3369,
"step": 1695
},
{
"epoch": 0.9134873723804406,
"grad_norm": 2.2410702930225437,
"learning_rate": 2.2650304978138916e-07,
"loss": 0.3422,
"step": 1700
},
{
"epoch": 0.9161740999462654,
"grad_norm": 1.988010259435427,
"learning_rate": 2.1275208736682262e-07,
"loss": 0.3273,
"step": 1705
},
{
"epoch": 0.9188608275120903,
"grad_norm": 2.162508541847929,
"learning_rate": 1.9942263853964917e-07,
"loss": 0.348,
"step": 1710
},
{
"epoch": 0.9215475550779151,
"grad_norm": 2.173499113968412,
"learning_rate": 1.8651587694783924e-07,
"loss": 0.3366,
"step": 1715
},
{
"epoch": 0.92423428264374,
"grad_norm": 1.9783121068716565,
"learning_rate": 1.7403293902206851e-07,
"loss": 0.3282,
"step": 1720
},
{
"epoch": 0.9269210102095647,
"grad_norm": 1.8953184142427286,
"learning_rate": 1.6197492387565629e-07,
"loss": 0.3346,
"step": 1725
},
{
"epoch": 0.9296077377753896,
"grad_norm": 2.0149463904024474,
"learning_rate": 1.503428932077916e-07,
"loss": 0.3457,
"step": 1730
},
{
"epoch": 0.9322944653412144,
"grad_norm": 2.0287919597007447,
"learning_rate": 1.3913787121004717e-07,
"loss": 0.3301,
"step": 1735
},
{
"epoch": 0.9349811929070392,
"grad_norm": 2.049509308073804,
"learning_rate": 1.2836084447620466e-07,
"loss": 0.3483,
"step": 1740
},
{
"epoch": 0.9376679204728641,
"grad_norm": 2.0687381489986714,
"learning_rate": 1.180127619153837e-07,
"loss": 0.3296,
"step": 1745
},
{
"epoch": 0.9403546480386888,
"grad_norm": 2.129632171046833,
"learning_rate": 1.0809453466849029e-07,
"loss": 0.3416,
"step": 1750
},
{
"epoch": 0.9430413756045137,
"grad_norm": 1.940321140330739,
"learning_rate": 9.860703602799281e-08,
"loss": 0.344,
"step": 1755
},
{
"epoch": 0.9457281031703385,
"grad_norm": 2.2300828043810434,
"learning_rate": 8.955110136102952e-08,
"loss": 0.3332,
"step": 1760
},
{
"epoch": 0.9484148307361634,
"grad_norm": 2.039077625849431,
"learning_rate": 8.092752803585513e-08,
"loss": 0.3398,
"step": 1765
},
{
"epoch": 0.9511015583019882,
"grad_norm": 1.9309466845356567,
"learning_rate": 7.273707535162988e-08,
"loss": 0.3345,
"step": 1770
},
{
"epoch": 0.9537882858678131,
"grad_norm": 2.085038229808163,
"learning_rate": 6.498046447156958e-08,
"loss": 0.3413,
"step": 1775
},
{
"epoch": 0.9564750134336378,
"grad_norm": 2.073268982177981,
"learning_rate": 5.7658378359443104e-08,
"loss": 0.3389,
"step": 1780
},
{
"epoch": 0.9591617409994626,
"grad_norm": 2.1191914780783185,
"learning_rate": 5.077146171943936e-08,
"loss": 0.3301,
"step": 1785
},
{
"epoch": 0.9618484685652875,
"grad_norm": 2.0695293690655494,
"learning_rate": 4.432032093940219e-08,
"loss": 0.3387,
"step": 1790
},
{
"epoch": 0.9645351961311123,
"grad_norm": 2.181866235925443,
"learning_rate": 3.8305524037438035e-08,
"loss": 0.3327,
"step": 1795
},
{
"epoch": 0.9672219236969372,
"grad_norm": 2.0829327323896742,
"learning_rate": 3.27276006119015e-08,
"loss": 0.3269,
"step": 1800
},
{
"epoch": 0.9699086512627619,
"grad_norm": 2.0402878486865084,
"learning_rate": 2.7587041794766012e-08,
"loss": 0.3428,
"step": 1805
},
{
"epoch": 0.9725953788285868,
"grad_norm": 2.1613154681131386,
"learning_rate": 2.2884300208378395e-08,
"loss": 0.3389,
"step": 1810
},
{
"epoch": 0.9752821063944116,
"grad_norm": 2.190787226227149,
"learning_rate": 1.8619789925608534e-08,
"loss": 0.3487,
"step": 1815
},
{
"epoch": 0.9779688339602365,
"grad_norm": 1.9274721887684194,
"learning_rate": 1.4793886433387417e-08,
"loss": 0.3248,
"step": 1820
},
{
"epoch": 0.9806555615260613,
"grad_norm": 2.0401602016541296,
"learning_rate": 1.1406926599646373e-08,
"loss": 0.3378,
"step": 1825
},
{
"epoch": 0.983342289091886,
"grad_norm": 1.9245890826575305,
"learning_rate": 8.459208643659122e-09,
"loss": 0.3301,
"step": 1830
},
{
"epoch": 0.9860290166577109,
"grad_norm": 2.145271841454969,
"learning_rate": 5.950992109779452e-09,
"loss": 0.3337,
"step": 1835
},
{
"epoch": 0.9887157442235357,
"grad_norm": 2.102840117283832,
"learning_rate": 3.88249784459227e-09,
"loss": 0.3436,
"step": 1840
},
{
"epoch": 0.9914024717893606,
"grad_norm": 1.8642178563070946,
"learning_rate": 2.2539079774658303e-09,
"loss": 0.3346,
"step": 1845
},
{
"epoch": 0.9940891993551854,
"grad_norm": 2.0754130978586462,
"learning_rate": 1.0653659045156695e-09,
"loss": 0.323,
"step": 1850
},
{
"epoch": 0.9967759269210102,
"grad_norm": 2.1177194452551293,
"learning_rate": 3.1697627597970794e-10,
"loss": 0.3273,
"step": 1855
},
{
"epoch": 0.999462654486835,
"grad_norm": 2.036609175873343,
"learning_rate": 8.804987003951937e-12,
"loss": 0.3344,
"step": 1860
},
{
"epoch": 1.0,
"eval_runtime": 3.3729,
"eval_samples_per_second": 2.965,
"eval_steps_per_second": 0.889,
"step": 1861
},
{
"epoch": 1.0,
"step": 1861,
"total_flos": 194827769610240.0,
"train_loss": 0.5009698675307838,
"train_runtime": 16706.09,
"train_samples_per_second": 1.782,
"train_steps_per_second": 0.111
}
],
"logging_steps": 5,
"max_steps": 1861,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 194827769610240.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
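
A minimal sketch, not part of the saved checkpoint, of how the log_history above could be inspected and plotted. It assumes the file is saved locally as trainer_state.json and that matplotlib is available; both the filename and the plotting library are assumptions for illustration only.

import json

import matplotlib.pyplot as plt

# Load the trainer state dumped above (path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step training logs: entries carrying a "loss" key.
# The trailing entries hold eval/runtime summaries instead of step losses.
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()

Running this reproduces the loss curve implied by the log: the loss falls from about 1.33 at step 1 to roughly 0.33 by step 1860, while the learning rate follows a warmup followed by a decay to near zero.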