{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.586881856680783,
"eval_steps": 500,
"global_step": 4400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.001333822401547234,
"grad_norm": 5.80256772259428,
"learning_rate": 4e-06,
"loss": 1.0498,
"step": 10
},
{
"epoch": 0.002667644803094468,
"grad_norm": 33.895696082107904,
"learning_rate": 8e-06,
"loss": 1.0653,
"step": 20
},
{
"epoch": 0.004001467204641702,
"grad_norm": 5.523348234283539,
"learning_rate": 1.2e-05,
"loss": 1.0341,
"step": 30
},
{
"epoch": 0.005335289606188936,
"grad_norm": 11.1556403156453,
"learning_rate": 1.6e-05,
"loss": 0.9692,
"step": 40
},
{
"epoch": 0.00666911200773617,
"grad_norm": 3.7375231126561825,
"learning_rate": 1.9999999999999998e-05,
"loss": 0.9554,
"step": 50
},
{
"epoch": 0.008002934409283404,
"grad_norm": 8.43538339698909,
"learning_rate": 2.4e-05,
"loss": 0.8965,
"step": 60
},
{
"epoch": 0.009336756810830639,
"grad_norm": 13.403454896011478,
"learning_rate": 2.8e-05,
"loss": 0.8273,
"step": 70
},
{
"epoch": 0.010670579212377872,
"grad_norm": 3.95522050766088,
"learning_rate": 2.9999966406213696e-05,
"loss": 0.7837,
"step": 80
},
{
"epoch": 0.012004401613925107,
"grad_norm": 36.799552052300854,
"learning_rate": 2.9999697656826056e-05,
"loss": 0.8288,
"step": 90
},
{
"epoch": 0.01333822401547234,
"grad_norm": 1.6305479563258536,
"learning_rate": 2.9999160162865885e-05,
"loss": 0.7778,
"step": 100
},
{
"epoch": 0.014672046417019574,
"grad_norm": 2.159536648784889,
"learning_rate": 2.9998353933963273e-05,
"loss": 0.7616,
"step": 110
},
{
"epoch": 0.016005868818566808,
"grad_norm": 3.397321425707004,
"learning_rate": 2.999727898456315e-05,
"loss": 0.7594,
"step": 120
},
{
"epoch": 0.017339691220114042,
"grad_norm": 4.772220837365037,
"learning_rate": 2.999593533392503e-05,
"loss": 0.756,
"step": 130
},
{
"epoch": 0.018673513621661277,
"grad_norm": 2.4845945633126885,
"learning_rate": 2.9994323006122654e-05,
"loss": 0.7601,
"step": 140
},
{
"epoch": 0.02000733602320851,
"grad_norm": 3.591682569169127,
"learning_rate": 2.9992442030043557e-05,
"loss": 0.7894,
"step": 150
},
{
"epoch": 0.021341158424755743,
"grad_norm": 2.5679458807474416,
"learning_rate": 2.9990292439388565e-05,
"loss": 0.7093,
"step": 160
},
{
"epoch": 0.022674980826302978,
"grad_norm": 1.9412569107551652,
"learning_rate": 2.9987874272671168e-05,
"loss": 0.706,
"step": 170
},
{
"epoch": 0.024008803227850213,
"grad_norm": 3.2667097270489,
"learning_rate": 2.9985187573216855e-05,
"loss": 0.7586,
"step": 180
},
{
"epoch": 0.025342625629397444,
"grad_norm": 4.4208737375400675,
"learning_rate": 2.998223238916232e-05,
"loss": 0.6985,
"step": 190
},
{
"epoch": 0.02667644803094468,
"grad_norm": 5.515966302183704,
"learning_rate": 2.9979008773454618e-05,
"loss": 0.7323,
"step": 200
},
{
"epoch": 0.028010270432491914,
"grad_norm": 2.964165450396077,
"learning_rate": 2.997551678385019e-05,
"loss": 0.7603,
"step": 210
},
{
"epoch": 0.02934409283403915,
"grad_norm": 3.0952916783456197,
"learning_rate": 2.997175648291384e-05,
"loss": 0.7421,
"step": 220
},
{
"epoch": 0.03067791523558638,
"grad_norm": 4.213588693904103,
"learning_rate": 2.996772793801763e-05,
"loss": 0.7322,
"step": 230
},
{
"epoch": 0.032011737637133615,
"grad_norm": 1.8568586103139084,
"learning_rate": 2.996343122133965e-05,
"loss": 0.6922,
"step": 240
},
{
"epoch": 0.033345560038680847,
"grad_norm": 4.494146778909846,
"learning_rate": 2.9958866409862745e-05,
"loss": 0.7244,
"step": 250
},
{
"epoch": 0.034679382440228085,
"grad_norm": 7.438170074282725,
"learning_rate": 2.9954033585373108e-05,
"loss": 0.7093,
"step": 260
},
{
"epoch": 0.036013204841775316,
"grad_norm": 2.3744787346857015,
"learning_rate": 2.994893283445885e-05,
"loss": 0.6983,
"step": 270
},
{
"epoch": 0.037347027243322554,
"grad_norm": 1.4722011682616383,
"learning_rate": 2.9943564248508415e-05,
"loss": 0.6781,
"step": 280
},
{
"epoch": 0.038680849644869786,
"grad_norm": 3.3397620832486075,
"learning_rate": 2.9937927923708966e-05,
"loss": 0.7399,
"step": 290
},
{
"epoch": 0.04001467204641702,
"grad_norm": 5.05063397044549,
"learning_rate": 2.993202396104465e-05,
"loss": 0.7671,
"step": 300
},
{
"epoch": 0.041348494447964255,
"grad_norm": 3.0128431385936767,
"learning_rate": 2.9925852466294795e-05,
"loss": 0.7015,
"step": 310
},
{
"epoch": 0.04268231684951149,
"grad_norm": 2.0161342716764237,
"learning_rate": 2.9919413550032014e-05,
"loss": 0.7009,
"step": 320
},
{
"epoch": 0.04401613925105872,
"grad_norm": 1.3114004070324985,
"learning_rate": 2.991270732762022e-05,
"loss": 0.7153,
"step": 330
},
{
"epoch": 0.045349961652605957,
"grad_norm": 18.493625676806268,
"learning_rate": 2.990573391921255e-05,
"loss": 0.7518,
"step": 340
},
{
"epoch": 0.04668378405415319,
"grad_norm": 2.9526764059703567,
"learning_rate": 2.989849344974924e-05,
"loss": 0.7133,
"step": 350
},
{
"epoch": 0.048017606455700426,
"grad_norm": 5.26274958582726,
"learning_rate": 2.9890986048955368e-05,
"loss": 0.7139,
"step": 360
},
{
"epoch": 0.04935142885724766,
"grad_norm": 3.5319788357887933,
"learning_rate": 2.9883211851338516e-05,
"loss": 0.7084,
"step": 370
},
{
"epoch": 0.05068525125879489,
"grad_norm": 7.607269935902469,
"learning_rate": 2.9875170996186392e-05,
"loss": 0.7309,
"step": 380
},
{
"epoch": 0.05201907366034213,
"grad_norm": 2.3456663308287253,
"learning_rate": 2.986686362756431e-05,
"loss": 0.6827,
"step": 390
},
{
"epoch": 0.05335289606188936,
"grad_norm": 2.176182050789012,
"learning_rate": 2.9858289894312617e-05,
"loss": 0.6995,
"step": 400
},
{
"epoch": 0.0546867184634366,
"grad_norm": 11.171630173781537,
"learning_rate": 2.9849449950044036e-05,
"loss": 0.7335,
"step": 410
},
{
"epoch": 0.05602054086498383,
"grad_norm": 6.63441431767892,
"learning_rate": 2.984034395314088e-05,
"loss": 0.7031,
"step": 420
},
{
"epoch": 0.05735436326653106,
"grad_norm": 2.861620412225736,
"learning_rate": 2.983097206675227e-05,
"loss": 0.6559,
"step": 430
},
{
"epoch": 0.0586881856680783,
"grad_norm": 5.523165036486206,
"learning_rate": 2.9821334458791156e-05,
"loss": 0.726,
"step": 440
},
{
"epoch": 0.06002200806962553,
"grad_norm": 3.5602243751368197,
"learning_rate": 2.9811431301931344e-05,
"loss": 0.7202,
"step": 450
},
{
"epoch": 0.06135583047117276,
"grad_norm": 11.333380381168622,
"learning_rate": 2.9801262773604377e-05,
"loss": 0.7189,
"step": 460
},
{
"epoch": 0.06268965287271999,
"grad_norm": 14.159758615106613,
"learning_rate": 2.9790829055996398e-05,
"loss": 0.7267,
"step": 470
},
{
"epoch": 0.06402347527426723,
"grad_norm": 9.009079485918289,
"learning_rate": 2.978013033604483e-05,
"loss": 0.748,
"step": 480
},
{
"epoch": 0.06535729767581447,
"grad_norm": 1.9682648681675994,
"learning_rate": 2.976916680543506e-05,
"loss": 0.7369,
"step": 490
},
{
"epoch": 0.06669112007736169,
"grad_norm": 2.9278164598232777,
"learning_rate": 2.975793866059701e-05,
"loss": 0.7037,
"step": 500
},
{
"epoch": 0.06802494247890893,
"grad_norm": 5.5563562303649885,
"learning_rate": 2.9746446102701606e-05,
"loss": 0.6986,
"step": 510
},
{
"epoch": 0.06935876488045617,
"grad_norm": 4.036767303783137,
"learning_rate": 2.9734689337657157e-05,
"loss": 0.7119,
"step": 520
},
{
"epoch": 0.07069258728200341,
"grad_norm": 1.9856990692088847,
"learning_rate": 2.9722668576105703e-05,
"loss": 0.7205,
"step": 530
},
{
"epoch": 0.07202640968355063,
"grad_norm": 5.200308739226583,
"learning_rate": 2.971038403341921e-05,
"loss": 0.6918,
"step": 540
},
{
"epoch": 0.07336023208509787,
"grad_norm": 2.237349124701919,
"learning_rate": 2.9697835929695727e-05,
"loss": 0.7339,
"step": 550
},
{
"epoch": 0.07469405448664511,
"grad_norm": 1.6388680632753365,
"learning_rate": 2.968502448975544e-05,
"loss": 0.7086,
"step": 560
},
{
"epoch": 0.07602787688819233,
"grad_norm": 2.8545575025135244,
"learning_rate": 2.967194994313663e-05,
"loss": 0.678,
"step": 570
},
{
"epoch": 0.07736169928973957,
"grad_norm": 2.674647983669599,
"learning_rate": 2.9658612524091594e-05,
"loss": 0.7119,
"step": 580
},
{
"epoch": 0.07869552169128681,
"grad_norm": 2.489047760330112,
"learning_rate": 2.9645012471582406e-05,
"loss": 0.7382,
"step": 590
},
{
"epoch": 0.08002934409283403,
"grad_norm": 5.509352102248308,
"learning_rate": 2.9631150029276662e-05,
"loss": 0.738,
"step": 600
},
{
"epoch": 0.08136316649438127,
"grad_norm": 3.6489235270404015,
"learning_rate": 2.9617025445543114e-05,
"loss": 0.7018,
"step": 610
},
{
"epoch": 0.08269698889592851,
"grad_norm": 2.7813651243235697,
"learning_rate": 2.9602638973447218e-05,
"loss": 0.7381,
"step": 620
},
{
"epoch": 0.08403081129747574,
"grad_norm": 8.271390523006518,
"learning_rate": 2.9587990870746574e-05,
"loss": 0.7168,
"step": 630
},
{
"epoch": 0.08536463369902297,
"grad_norm": 1.2460611751687307,
"learning_rate": 2.9573081399886356e-05,
"loss": 0.7004,
"step": 640
},
{
"epoch": 0.08669845610057021,
"grad_norm": 1.704626418994062,
"learning_rate": 2.9557910827994568e-05,
"loss": 0.738,
"step": 650
},
{
"epoch": 0.08803227850211744,
"grad_norm": 3.275051693107957,
"learning_rate": 2.9542479426877283e-05,
"loss": 0.7017,
"step": 660
},
{
"epoch": 0.08936610090366467,
"grad_norm": 11.389990685570503,
"learning_rate": 2.9526787473013753e-05,
"loss": 0.7107,
"step": 670
},
{
"epoch": 0.09069992330521191,
"grad_norm": 5.591277359184055,
"learning_rate": 2.9510835247551485e-05,
"loss": 0.7141,
"step": 680
},
{
"epoch": 0.09203374570675915,
"grad_norm": 3.180111568581053,
"learning_rate": 2.949462303630116e-05,
"loss": 0.6987,
"step": 690
},
{
"epoch": 0.09336756810830638,
"grad_norm": 3.8428068166831753,
"learning_rate": 2.9478151129731567e-05,
"loss": 0.7373,
"step": 700
},
{
"epoch": 0.09470139050985361,
"grad_norm": 2.231397231771392,
"learning_rate": 2.9461419822964348e-05,
"loss": 0.6962,
"step": 710
},
{
"epoch": 0.09603521291140085,
"grad_norm": 18.287201889017563,
"learning_rate": 2.9444429415768726e-05,
"loss": 0.6723,
"step": 720
},
{
"epoch": 0.09736903531294808,
"grad_norm": 4.340932687135137,
"learning_rate": 2.942718021255617e-05,
"loss": 0.7151,
"step": 730
},
{
"epoch": 0.09870285771449532,
"grad_norm": 2.7813821825484446,
"learning_rate": 2.940967252237488e-05,
"loss": 0.7332,
"step": 740
},
{
"epoch": 0.10003668011604255,
"grad_norm": 2.3251782912937475,
"learning_rate": 2.9391906658904296e-05,
"loss": 0.6751,
"step": 750
},
{
"epoch": 0.10137050251758978,
"grad_norm": 8.123799866292751,
"learning_rate": 2.937388294044946e-05,
"loss": 0.6886,
"step": 760
},
{
"epoch": 0.10270432491913702,
"grad_norm": 1.528579329214318,
"learning_rate": 2.9355601689935315e-05,
"loss": 0.7146,
"step": 770
},
{
"epoch": 0.10403814732068425,
"grad_norm": 2.0278953433974825,
"learning_rate": 2.933706323490092e-05,
"loss": 0.7453,
"step": 780
},
{
"epoch": 0.10537196972223148,
"grad_norm": 1.4306270659678864,
"learning_rate": 2.9318267907493583e-05,
"loss": 0.6702,
"step": 790
},
{
"epoch": 0.10670579212377872,
"grad_norm": 1.5178081087799355,
"learning_rate": 2.9299216044462903e-05,
"loss": 0.7346,
"step": 800
},
{
"epoch": 0.10803961452532596,
"grad_norm": 9.506616797760028,
"learning_rate": 2.927990798715475e-05,
"loss": 0.6558,
"step": 810
},
{
"epoch": 0.1093734369268732,
"grad_norm": 2.4597311302505767,
"learning_rate": 2.926034408150513e-05,
"loss": 0.726,
"step": 820
},
{
"epoch": 0.11070725932842042,
"grad_norm": 12.372180964422007,
"learning_rate": 2.9240524678034016e-05,
"loss": 0.7308,
"step": 830
},
{
"epoch": 0.11204108172996766,
"grad_norm": 1.4488469801164658,
"learning_rate": 2.9220450131839037e-05,
"loss": 0.7072,
"step": 840
},
{
"epoch": 0.1133749041315149,
"grad_norm": 8.602946960846197,
"learning_rate": 2.920012080258912e-05,
"loss": 0.7234,
"step": 850
},
{
"epoch": 0.11470872653306212,
"grad_norm": 1.441195423452674,
"learning_rate": 2.9179537054518085e-05,
"loss": 0.6934,
"step": 860
},
{
"epoch": 0.11604254893460936,
"grad_norm": 4.318952956999577,
"learning_rate": 2.9158699256418056e-05,
"loss": 0.6534,
"step": 870
},
{
"epoch": 0.1173763713361566,
"grad_norm": 9.733179695623866,
"learning_rate": 2.9137607781632913e-05,
"loss": 0.71,
"step": 880
},
{
"epoch": 0.11871019373770382,
"grad_norm": 7.397049093836735,
"learning_rate": 2.911626300805155e-05,
"loss": 0.7386,
"step": 890
},
{
"epoch": 0.12004401613925106,
"grad_norm": 2.920812240139869,
"learning_rate": 2.9094665318101155e-05,
"loss": 0.6789,
"step": 900
},
{
"epoch": 0.1213778385407983,
"grad_norm": 1.7031296196271206,
"learning_rate": 2.9072815098740326e-05,
"loss": 0.715,
"step": 910
},
{
"epoch": 0.12271166094234552,
"grad_norm": 1.5630656172291801,
"learning_rate": 2.9050712741452136e-05,
"loss": 0.7136,
"step": 920
},
{
"epoch": 0.12404548334389276,
"grad_norm": 7.870543414771234,
"learning_rate": 2.902835864223715e-05,
"loss": 0.6669,
"step": 930
},
{
"epoch": 0.12537930574543998,
"grad_norm": 4.843671834991794,
"learning_rate": 2.9005753201606287e-05,
"loss": 0.7281,
"step": 940
},
{
"epoch": 0.12671312814698724,
"grad_norm": 3.010503818258016,
"learning_rate": 2.8982896824573678e-05,
"loss": 0.7018,
"step": 950
},
{
"epoch": 0.12804695054853446,
"grad_norm": 2.5552186559589654,
"learning_rate": 2.8959789920649394e-05,
"loss": 0.7338,
"step": 960
},
{
"epoch": 0.12938077295008168,
"grad_norm": 12.306055851495117,
"learning_rate": 2.893643290383212e-05,
"loss": 0.6732,
"step": 970
},
{
"epoch": 0.13071459535162894,
"grad_norm": 2.16185926525944,
"learning_rate": 2.891282619260172e-05,
"loss": 0.7108,
"step": 980
},
{
"epoch": 0.13204841775317616,
"grad_norm": 5.992378798792086,
"learning_rate": 2.8888970209911754e-05,
"loss": 0.6525,
"step": 990
},
{
"epoch": 0.13338224015472339,
"grad_norm": 2.986272238787896,
"learning_rate": 2.8864865383181893e-05,
"loss": 0.6655,
"step": 1000
},
{
"epoch": 0.13471606255627064,
"grad_norm": 12.855377354582437,
"learning_rate": 2.8840512144290273e-05,
"loss": 0.6826,
"step": 1010
},
{
"epoch": 0.13604988495781786,
"grad_norm": 2.045979893776702,
"learning_rate": 2.8815910929565734e-05,
"loss": 0.6616,
"step": 1020
},
{
"epoch": 0.1373837073593651,
"grad_norm": 6.623264301300591,
"learning_rate": 2.879106217978002e-05,
"loss": 0.6935,
"step": 1030
},
{
"epoch": 0.13871752976091234,
"grad_norm": 2.67990218211766,
"learning_rate": 2.8765966340139892e-05,
"loss": 0.6671,
"step": 1040
},
{
"epoch": 0.14005135216245956,
"grad_norm": 2.699521523924172,
"learning_rate": 2.8740623860279116e-05,
"loss": 0.6763,
"step": 1050
},
{
"epoch": 0.14138517456400682,
"grad_norm": 4.1129898011507535,
"learning_rate": 2.871503519425044e-05,
"loss": 0.7159,
"step": 1060
},
{
"epoch": 0.14271899696555404,
"grad_norm": 2.4592021333659146,
"learning_rate": 2.8689200800517448e-05,
"loss": 0.6551,
"step": 1070
},
{
"epoch": 0.14405281936710126,
"grad_norm": 5.138500389099849,
"learning_rate": 2.866312114194634e-05,
"loss": 0.7214,
"step": 1080
},
{
"epoch": 0.14538664176864852,
"grad_norm": 2.822433730666048,
"learning_rate": 2.8636796685797657e-05,
"loss": 0.6862,
"step": 1090
},
{
"epoch": 0.14672046417019574,
"grad_norm": 3.086468537427806,
"learning_rate": 2.8610227903717876e-05,
"loss": 0.6784,
"step": 1100
},
{
"epoch": 0.14805428657174297,
"grad_norm": 2.079766793749202,
"learning_rate": 2.8583415271730994e-05,
"loss": 0.7065,
"step": 1110
},
{
"epoch": 0.14938810897329022,
"grad_norm": 1.659870509072264,
"learning_rate": 2.855635927022998e-05,
"loss": 0.7197,
"step": 1120
},
{
"epoch": 0.15072193137483744,
"grad_norm": 7.870626779339635,
"learning_rate": 2.8529060383968175e-05,
"loss": 0.7305,
"step": 1130
},
{
"epoch": 0.15205575377638467,
"grad_norm": 3.0600340899893537,
"learning_rate": 2.850151910205061e-05,
"loss": 0.6922,
"step": 1140
},
{
"epoch": 0.15338957617793192,
"grad_norm": 3.6147451373702806,
"learning_rate": 2.847373591792523e-05,
"loss": 0.7044,
"step": 1150
},
{
"epoch": 0.15472339857947914,
"grad_norm": 4.740777951553679,
"learning_rate": 2.844571132937407e-05,
"loss": 0.6794,
"step": 1160
},
{
"epoch": 0.15605722098102637,
"grad_norm": 3.377522973717319,
"learning_rate": 2.841744583850431e-05,
"loss": 0.673,
"step": 1170
},
{
"epoch": 0.15739104338257362,
"grad_norm": 4.250656077289992,
"learning_rate": 2.838893995173932e-05,
"loss": 0.6975,
"step": 1180
},
{
"epoch": 0.15872486578412084,
"grad_norm": 11.73693900915769,
"learning_rate": 2.836019417980955e-05,
"loss": 0.6572,
"step": 1190
},
{
"epoch": 0.16005868818566807,
"grad_norm": 2.729291714043308,
"learning_rate": 2.8331209037743387e-05,
"loss": 0.7247,
"step": 1200
},
{
"epoch": 0.16139251058721532,
"grad_norm": 2.347985877636318,
"learning_rate": 2.8301985044857947e-05,
"loss": 0.7199,
"step": 1210
},
{
"epoch": 0.16272633298876255,
"grad_norm": 2.2534314586033113,
"learning_rate": 2.8272522724749743e-05,
"loss": 0.6835,
"step": 1220
},
{
"epoch": 0.16406015539030977,
"grad_norm": 3.159583116387406,
"learning_rate": 2.8242822605285323e-05,
"loss": 0.7122,
"step": 1230
},
{
"epoch": 0.16539397779185702,
"grad_norm": 2.086588782887239,
"learning_rate": 2.8212885218591812e-05,
"loss": 0.6949,
"step": 1240
},
{
"epoch": 0.16672780019340425,
"grad_norm": 7.284236966547317,
"learning_rate": 2.8182711101047362e-05,
"loss": 0.6641,
"step": 1250
},
{
"epoch": 0.16806162259495147,
"grad_norm": 3.0369619450249594,
"learning_rate": 2.815230079327156e-05,
"loss": 0.6731,
"step": 1260
},
{
"epoch": 0.16939544499649872,
"grad_norm": 1.4144726574636068,
"learning_rate": 2.8121654840115734e-05,
"loss": 0.6898,
"step": 1270
},
{
"epoch": 0.17072926739804595,
"grad_norm": 3.66202356670303,
"learning_rate": 2.809077379065319e-05,
"loss": 0.7174,
"step": 1280
},
{
"epoch": 0.17206308979959317,
"grad_norm": 4.778073521019285,
"learning_rate": 2.805965819816937e-05,
"loss": 0.6186,
"step": 1290
},
{
"epoch": 0.17339691220114042,
"grad_norm": 3.9620427201734576,
"learning_rate": 2.802830862015196e-05,
"loss": 0.684,
"step": 1300
},
{
"epoch": 0.17473073460268765,
"grad_norm": 4.170199740083487,
"learning_rate": 2.799672561828087e-05,
"loss": 0.7102,
"step": 1310
},
{
"epoch": 0.17606455700423487,
"grad_norm": 2.2612205048804714,
"learning_rate": 2.79649097584182e-05,
"loss": 0.7451,
"step": 1320
},
{
"epoch": 0.17739837940578213,
"grad_norm": 1.7156828128822517,
"learning_rate": 2.7932861610598077e-05,
"loss": 0.6641,
"step": 1330
},
{
"epoch": 0.17873220180732935,
"grad_norm": 7.960733847217257,
"learning_rate": 2.7900581749016466e-05,
"loss": 0.7365,
"step": 1340
},
{
"epoch": 0.1800660242088766,
"grad_norm": 2.5364939682563756,
"learning_rate": 2.7868070752020865e-05,
"loss": 0.7078,
"step": 1350
},
{
"epoch": 0.18139984661042383,
"grad_norm": 2.7446281678776137,
"learning_rate": 2.7835329202099944e-05,
"loss": 0.7214,
"step": 1360
},
{
"epoch": 0.18273366901197105,
"grad_norm": 3.2416602016145886,
"learning_rate": 2.7802357685873117e-05,
"loss": 0.6757,
"step": 1370
},
{
"epoch": 0.1840674914135183,
"grad_norm": 5.225459736579946,
"learning_rate": 2.7769156794080033e-05,
"loss": 0.7381,
"step": 1380
},
{
"epoch": 0.18540131381506553,
"grad_norm": 5.176692689501482,
"learning_rate": 2.7735727121569967e-05,
"loss": 0.7354,
"step": 1390
},
{
"epoch": 0.18673513621661275,
"grad_norm": 2.7441883232342574,
"learning_rate": 2.770206926729121e-05,
"loss": 0.6937,
"step": 1400
},
{
"epoch": 0.18806895861816,
"grad_norm": 2.9792116246243525,
"learning_rate": 2.7668183834280284e-05,
"loss": 0.6641,
"step": 1410
},
{
"epoch": 0.18940278101970723,
"grad_norm": 2.4645298487410723,
"learning_rate": 2.763407142965117e-05,
"loss": 0.6274,
"step": 1420
},
{
"epoch": 0.19073660342125445,
"grad_norm": 7.245032878035033,
"learning_rate": 2.759973266458444e-05,
"loss": 0.6962,
"step": 1430
},
{
"epoch": 0.1920704258228017,
"grad_norm": 5.642209662597534,
"learning_rate": 2.756516815431627e-05,
"loss": 0.7016,
"step": 1440
},
{
"epoch": 0.19340424822434893,
"grad_norm": 2.9804981875184526,
"learning_rate": 2.7530378518127445e-05,
"loss": 0.7331,
"step": 1450
},
{
"epoch": 0.19473807062589615,
"grad_norm": 7.496561660992361,
"learning_rate": 2.7495364379332256e-05,
"loss": 0.7234,
"step": 1460
},
{
"epoch": 0.1960718930274434,
"grad_norm": 1.6139389803246291,
"learning_rate": 2.7460126365267335e-05,
"loss": 0.7013,
"step": 1470
},
{
"epoch": 0.19740571542899063,
"grad_norm": 4.618678334755141,
"learning_rate": 2.7424665107280402e-05,
"loss": 0.6892,
"step": 1480
},
{
"epoch": 0.19873953783053785,
"grad_norm": 15.494190234738744,
"learning_rate": 2.738898124071898e-05,
"loss": 0.6785,
"step": 1490
},
{
"epoch": 0.2000733602320851,
"grad_norm": 3.1680363319798954,
"learning_rate": 2.735307540491898e-05,
"loss": 0.669,
"step": 1500
},
{
"epoch": 0.20140718263363233,
"grad_norm": 2.5397562341036224,
"learning_rate": 2.7316948243193273e-05,
"loss": 0.6726,
"step": 1510
},
{
"epoch": 0.20274100503517956,
"grad_norm": 4.139021422606072,
"learning_rate": 2.7280600402820146e-05,
"loss": 0.6706,
"step": 1520
},
{
"epoch": 0.2040748274367268,
"grad_norm": 2.7422468825646065,
"learning_rate": 2.724403253503171e-05,
"loss": 0.7078,
"step": 1530
},
{
"epoch": 0.20540864983827403,
"grad_norm": 2.744225768808104,
"learning_rate": 2.7207245295002242e-05,
"loss": 0.6821,
"step": 1540
},
{
"epoch": 0.20674247223982126,
"grad_norm": 2.234040668790152,
"learning_rate": 2.7170239341836436e-05,
"loss": 0.7451,
"step": 1550
},
{
"epoch": 0.2080762946413685,
"grad_norm": 2.531733996425376,
"learning_rate": 2.7133015338557585e-05,
"loss": 0.7205,
"step": 1560
},
{
"epoch": 0.20941011704291573,
"grad_norm": 2.9772483856455616,
"learning_rate": 2.7095573952095727e-05,
"loss": 0.7274,
"step": 1570
},
{
"epoch": 0.21074393944446296,
"grad_norm": 3.317235333047955,
"learning_rate": 2.705791585327568e-05,
"loss": 0.7309,
"step": 1580
},
{
"epoch": 0.2120777618460102,
"grad_norm": 1.9652386793628944,
"learning_rate": 2.7020041716805014e-05,
"loss": 0.7157,
"step": 1590
},
{
"epoch": 0.21341158424755743,
"grad_norm": 2.93724058913164,
"learning_rate": 2.6981952221261986e-05,
"loss": 0.7123,
"step": 1600
},
{
"epoch": 0.21474540664910466,
"grad_norm": 6.395577225750395,
"learning_rate": 2.6943648049083366e-05,
"loss": 0.6991,
"step": 1610
},
{
"epoch": 0.2160792290506519,
"grad_norm": 2.4292347967714973,
"learning_rate": 2.6905129886552208e-05,
"loss": 0.7004,
"step": 1620
},
{
"epoch": 0.21741305145219914,
"grad_norm": 1.8304810950546353,
"learning_rate": 2.6866398423785568e-05,
"loss": 0.6941,
"step": 1630
},
{
"epoch": 0.2187468738537464,
"grad_norm": 2.762870839632077,
"learning_rate": 2.682745435472212e-05,
"loss": 0.6928,
"step": 1640
},
{
"epoch": 0.2200806962552936,
"grad_norm": 3.4172019229090917,
"learning_rate": 2.6788298377109748e-05,
"loss": 0.7344,
"step": 1650
},
{
"epoch": 0.22141451865684084,
"grad_norm": 2.7483538989548175,
"learning_rate": 2.6748931192493017e-05,
"loss": 0.7367,
"step": 1660
},
{
"epoch": 0.2227483410583881,
"grad_norm": 7.314729269236597,
"learning_rate": 2.670935350620063e-05,
"loss": 0.6849,
"step": 1670
},
{
"epoch": 0.2240821634599353,
"grad_norm": 3.8688065039432527,
"learning_rate": 2.6669566027332767e-05,
"loss": 0.6812,
"step": 1680
},
{
"epoch": 0.22541598586148254,
"grad_norm": 7.10517346658295,
"learning_rate": 2.6629569468748404e-05,
"loss": 0.6089,
"step": 1690
},
{
"epoch": 0.2267498082630298,
"grad_norm": 2.4198822683275147,
"learning_rate": 2.658936454705251e-05,
"loss": 0.6666,
"step": 1700
},
{
"epoch": 0.22808363066457701,
"grad_norm": 2.4915285584652054,
"learning_rate": 2.6548951982583246e-05,
"loss": 0.7088,
"step": 1710
},
{
"epoch": 0.22941745306612424,
"grad_norm": 2.2849831540010537,
"learning_rate": 2.650833249939903e-05,
"loss": 0.7149,
"step": 1720
},
{
"epoch": 0.2307512754676715,
"grad_norm": 1.5098088938051029,
"learning_rate": 2.6467506825265573e-05,
"loss": 0.7254,
"step": 1730
},
{
"epoch": 0.23208509786921871,
"grad_norm": 3.4800248296443814,
"learning_rate": 2.642647569164284e-05,
"loss": 0.6916,
"step": 1740
},
{
"epoch": 0.23341892027076594,
"grad_norm": 7.281500947090542,
"learning_rate": 2.638523983367194e-05,
"loss": 0.6831,
"step": 1750
},
{
"epoch": 0.2347527426723132,
"grad_norm": 3.0161864395495446,
"learning_rate": 2.634379999016198e-05,
"loss": 0.6999,
"step": 1760
},
{
"epoch": 0.23608656507386042,
"grad_norm": 2.0917745352156762,
"learning_rate": 2.6302156903576784e-05,
"loss": 0.7112,
"step": 1770
},
{
"epoch": 0.23742038747540764,
"grad_norm": 1.918811185774526,
"learning_rate": 2.6260311320021628e-05,
"loss": 0.6725,
"step": 1780
},
{
"epoch": 0.2387542098769549,
"grad_norm": 3.0697413876733695,
"learning_rate": 2.6218263989229855e-05,
"loss": 0.7133,
"step": 1790
},
{
"epoch": 0.24008803227850212,
"grad_norm": 6.14274393655379,
"learning_rate": 2.617601566454944e-05,
"loss": 0.6678,
"step": 1800
},
{
"epoch": 0.24142185468004934,
"grad_norm": 4.259979200715344,
"learning_rate": 2.613356710292951e-05,
"loss": 0.7013,
"step": 1810
},
{
"epoch": 0.2427556770815966,
"grad_norm": 3.1011058557692808,
"learning_rate": 2.6090919064906766e-05,
"loss": 0.7027,
"step": 1820
},
{
"epoch": 0.24408949948314382,
"grad_norm": 3.677900978078831,
"learning_rate": 2.6048072314591854e-05,
"loss": 0.711,
"step": 1830
},
{
"epoch": 0.24542332188469104,
"grad_norm": 2.368576699713982,
"learning_rate": 2.600502761965569e-05,
"loss": 0.6917,
"step": 1840
},
{
"epoch": 0.2467571442862383,
"grad_norm": 3.0346306894457,
"learning_rate": 2.59617857513157e-05,
"loss": 0.69,
"step": 1850
},
{
"epoch": 0.24809096668778552,
"grad_norm": 3.1228131080916204,
"learning_rate": 2.591834748432198e-05,
"loss": 0.695,
"step": 1860
},
{
"epoch": 0.24942478908933274,
"grad_norm": 2.6886660685401034,
"learning_rate": 2.5874713596943465e-05,
"loss": 0.6681,
"step": 1870
},
{
"epoch": 0.25075861149087997,
"grad_norm": 1.7244460999561722,
"learning_rate": 2.5830884870953933e-05,
"loss": 0.6737,
"step": 1880
},
{
"epoch": 0.25209243389242725,
"grad_norm": 2.4283725332509842,
"learning_rate": 2.578686209161803e-05,
"loss": 0.6598,
"step": 1890
},
{
"epoch": 0.2534262562939745,
"grad_norm": 5.496556851547161,
"learning_rate": 2.5742646047677186e-05,
"loss": 0.6931,
"step": 1900
},
{
"epoch": 0.2547600786955217,
"grad_norm": 1.2751270156124934,
"learning_rate": 2.5698237531335493e-05,
"loss": 0.7043,
"step": 1910
},
{
"epoch": 0.2560939010970689,
"grad_norm": 8.807017683974516,
"learning_rate": 2.56536373382455e-05,
"loss": 0.6234,
"step": 1920
},
{
"epoch": 0.25742772349861615,
"grad_norm": 3.6331868296726277,
"learning_rate": 2.5608846267493974e-05,
"loss": 0.6763,
"step": 1930
},
{
"epoch": 0.25876154590016337,
"grad_norm": 5.094905230807839,
"learning_rate": 2.5563865121587563e-05,
"loss": 0.6692,
"step": 1940
},
{
"epoch": 0.26009536830171065,
"grad_norm": 2.0520732769663237,
"learning_rate": 2.5518694706438445e-05,
"loss": 0.7008,
"step": 1950
},
{
"epoch": 0.2614291907032579,
"grad_norm": 2.1265138955486336,
"learning_rate": 2.5473335831349842e-05,
"loss": 0.6623,
"step": 1960
},
{
"epoch": 0.2627630131048051,
"grad_norm": 4.532469697105077,
"learning_rate": 2.5427789309001577e-05,
"loss": 0.7099,
"step": 1970
},
{
"epoch": 0.2640968355063523,
"grad_norm": 1.8912900905557881,
"learning_rate": 2.538205595543548e-05,
"loss": 0.712,
"step": 1980
},
{
"epoch": 0.26543065790789955,
"grad_norm": 9.714825687307293,
"learning_rate": 2.5336136590040767e-05,
"loss": 0.6418,
"step": 1990
},
{
"epoch": 0.26676448030944677,
"grad_norm": 4.375615975749738,
"learning_rate": 2.529003203553937e-05,
"loss": 0.6933,
"step": 2000
},
{
"epoch": 0.26809830271099405,
"grad_norm": 5.945657366701919,
"learning_rate": 2.5243743117971186e-05,
"loss": 0.6748,
"step": 2010
},
{
"epoch": 0.2694321251125413,
"grad_norm": 7.453951551881255,
"learning_rate": 2.5197270666679295e-05,
"loss": 0.7004,
"step": 2020
},
{
"epoch": 0.2707659475140885,
"grad_norm": 2.3916662603858665,
"learning_rate": 2.515061551429509e-05,
"loss": 0.6961,
"step": 2030
},
{
"epoch": 0.2720997699156357,
"grad_norm": 3.5972047868369104,
"learning_rate": 2.5103778496723334e-05,
"loss": 0.7058,
"step": 2040
},
{
"epoch": 0.27343359231718295,
"grad_norm": 4.525268184238612,
"learning_rate": 2.5056760453127242e-05,
"loss": 0.6704,
"step": 2050
},
{
"epoch": 0.2747674147187302,
"grad_norm": 5.9581146555788465,
"learning_rate": 2.5009562225913385e-05,
"loss": 0.6722,
"step": 2060
},
{
"epoch": 0.27610123712027745,
"grad_norm": 4.163590223716233,
"learning_rate": 2.4962184660716645e-05,
"loss": 0.6933,
"step": 2070
},
{
"epoch": 0.2774350595218247,
"grad_norm": 2.0180801697563258,
"learning_rate": 2.4914628606385022e-05,
"loss": 0.6982,
"step": 2080
},
{
"epoch": 0.2787688819233719,
"grad_norm": 2.3996169579330373,
"learning_rate": 2.4866894914964462e-05,
"loss": 0.6832,
"step": 2090
},
{
"epoch": 0.2801027043249191,
"grad_norm": 20.07054133895426,
"learning_rate": 2.481898444168357e-05,
"loss": 0.6871,
"step": 2100
},
{
"epoch": 0.28143652672646635,
"grad_norm": 3.563765719247629,
"learning_rate": 2.4770898044938284e-05,
"loss": 0.703,
"step": 2110
},
{
"epoch": 0.28277034912801363,
"grad_norm": 1.9816905810381245,
"learning_rate": 2.4722636586276522e-05,
"loss": 0.7132,
"step": 2120
},
{
"epoch": 0.28410417152956086,
"grad_norm": 4.0053115388283205,
"learning_rate": 2.4674200930382712e-05,
"loss": 0.6991,
"step": 2130
},
{
"epoch": 0.2854379939311081,
"grad_norm": 1.9643538302216321,
"learning_rate": 2.4625591945062326e-05,
"loss": 0.7182,
"step": 2140
},
{
"epoch": 0.2867718163326553,
"grad_norm": 1.7027289253737494,
"learning_rate": 2.4576810501226318e-05,
"loss": 0.6856,
"step": 2150
},
{
"epoch": 0.28810563873420253,
"grad_norm": 3.394597130806682,
"learning_rate": 2.4527857472875515e-05,
"loss": 0.7013,
"step": 2160
},
{
"epoch": 0.28943946113574975,
"grad_norm": 2.766786923916393,
"learning_rate": 2.447873373708498e-05,
"loss": 0.6913,
"step": 2170
},
{
"epoch": 0.29077328353729703,
"grad_norm": 6.781532105937228,
"learning_rate": 2.4429440173988275e-05,
"loss": 0.7401,
"step": 2180
},
{
"epoch": 0.29210710593884426,
"grad_norm": 2.6220209383444946,
"learning_rate": 2.43799776667617e-05,
"loss": 0.7287,
"step": 2190
},
{
"epoch": 0.2934409283403915,
"grad_norm": 4.597566226152422,
"learning_rate": 2.4330347101608492e-05,
"loss": 0.6664,
"step": 2200
},
{
"epoch": 0.2947747507419387,
"grad_norm": 3.15622915128866,
"learning_rate": 2.428054936774289e-05,
"loss": 0.6757,
"step": 2210
},
{
"epoch": 0.29610857314348593,
"grad_norm": 3.5777836932521065,
"learning_rate": 2.423058535737427e-05,
"loss": 0.7396,
"step": 2220
},
{
"epoch": 0.29744239554503316,
"grad_norm": 2.505384749600403,
"learning_rate": 2.418045596569111e-05,
"loss": 0.7156,
"step": 2230
},
{
"epoch": 0.29877621794658044,
"grad_norm": 15.640998645324629,
"learning_rate": 2.4130162090844976e-05,
"loss": 0.7016,
"step": 2240
},
{
"epoch": 0.30011004034812766,
"grad_norm": 6.1147200283733865,
"learning_rate": 2.4079704633934427e-05,
"loss": 0.6835,
"step": 2250
},
{
"epoch": 0.3014438627496749,
"grad_norm": 2.4704828096249907,
"learning_rate": 2.4029084498988864e-05,
"loss": 0.717,
"step": 2260
},
{
"epoch": 0.3027776851512221,
"grad_norm": 3.624817679194012,
"learning_rate": 2.3978302592952332e-05,
"loss": 0.6863,
"step": 2270
},
{
"epoch": 0.30411150755276933,
"grad_norm": 7.1778372122735155,
"learning_rate": 2.392735982566728e-05,
"loss": 0.7057,
"step": 2280
},
{
"epoch": 0.30544532995431656,
"grad_norm": 1.541203747230883,
"learning_rate": 2.387625710985826e-05,
"loss": 0.6755,
"step": 2290
},
{
"epoch": 0.30677915235586384,
"grad_norm": 5.290753363343769,
"learning_rate": 2.3824995361115552e-05,
"loss": 0.7214,
"step": 2300
},
{
"epoch": 0.30811297475741106,
"grad_norm": 11.18524078914846,
"learning_rate": 2.3773575497878784e-05,
"loss": 0.687,
"step": 2310
},
{
"epoch": 0.3094467971589583,
"grad_norm": 2.8473409260968854,
"learning_rate": 2.372199844142048e-05,
"loss": 0.6588,
"step": 2320
},
{
"epoch": 0.3107806195605055,
"grad_norm": 3.6509202763742894,
"learning_rate": 2.3670265115829523e-05,
"loss": 0.7146,
"step": 2330
},
{
"epoch": 0.31211444196205274,
"grad_norm": 2.86323212169014,
"learning_rate": 2.3618376447994633e-05,
"loss": 0.6965,
"step": 2340
},
{
"epoch": 0.31344826436359996,
"grad_norm": 1.6724444694024563,
"learning_rate": 2.3566333367587737e-05,
"loss": 0.6827,
"step": 2350
},
{
"epoch": 0.31478208676514724,
"grad_norm": 3.7438462947121876,
"learning_rate": 2.3514136807047318e-05,
"loss": 0.677,
"step": 2360
},
{
"epoch": 0.31611590916669446,
"grad_norm": 3.150319939971515,
"learning_rate": 2.3461787701561724e-05,
"loss": 0.6926,
"step": 2370
},
{
"epoch": 0.3174497315682417,
"grad_norm": 1.9724696911512674,
"learning_rate": 2.340928698905239e-05,
"loss": 0.7269,
"step": 2380
},
{
"epoch": 0.3187835539697889,
"grad_norm": 2.6615995505256604,
"learning_rate": 2.335663561015704e-05,
"loss": 0.719,
"step": 2390
},
{
"epoch": 0.32011737637133614,
"grad_norm": 3.648818329043563,
"learning_rate": 2.3303834508212845e-05,
"loss": 0.6593,
"step": 2400
},
{
"epoch": 0.3214511987728834,
"grad_norm": 5.032935766388129,
"learning_rate": 2.325088462923951e-05,
"loss": 0.7018,
"step": 2410
},
{
"epoch": 0.32278502117443064,
"grad_norm": 5.116190153583237,
"learning_rate": 2.319778692192233e-05,
"loss": 0.6138,
"step": 2420
},
{
"epoch": 0.32411884357597787,
"grad_norm": 8.77553429349065,
"learning_rate": 2.3144542337595196e-05,
"loss": 0.6995,
"step": 2430
},
{
"epoch": 0.3254526659775251,
"grad_norm": 4.020402137418298,
"learning_rate": 2.3091151830223537e-05,
"loss": 0.6935,
"step": 2440
},
{
"epoch": 0.3267864883790723,
"grad_norm": 2.326990350307363,
"learning_rate": 2.3037616356387237e-05,
"loss": 0.6657,
"step": 2450
},
{
"epoch": 0.32812031078061954,
"grad_norm": 1.9450305290081706,
"learning_rate": 2.2983936875263495e-05,
"loss": 0.6884,
"step": 2460
},
{
"epoch": 0.3294541331821668,
"grad_norm": 2.4083218262957407,
"learning_rate": 2.2930114348609655e-05,
"loss": 0.6324,
"step": 2470
},
{
"epoch": 0.33078795558371404,
"grad_norm": 4.469293094525185,
"learning_rate": 2.2876149740745935e-05,
"loss": 0.7054,
"step": 2480
},
{
"epoch": 0.33212177798526127,
"grad_norm": 3.0408327884382613,
"learning_rate": 2.28220440185382e-05,
"loss": 0.6996,
"step": 2490
},
{
"epoch": 0.3334556003868085,
"grad_norm": 2.5340984000691273,
"learning_rate": 2.2767798151380597e-05,
"loss": 0.6908,
"step": 2500
},
{
"epoch": 0.3347894227883557,
"grad_norm": 2.4867165525033,
"learning_rate": 2.27134131111782e-05,
"loss": 0.6838,
"step": 2510
},
{
"epoch": 0.33612324518990294,
"grad_norm": 14.755496795057269,
"learning_rate": 2.2658889872329628e-05,
"loss": 0.7072,
"step": 2520
},
{
"epoch": 0.3374570675914502,
"grad_norm": 11.498768616138861,
"learning_rate": 2.2604229411709518e-05,
"loss": 0.6837,
"step": 2530
},
{
"epoch": 0.33879088999299745,
"grad_norm": 1.6627733851927542,
"learning_rate": 2.25494327086511e-05,
"loss": 0.6948,
"step": 2540
},
{
"epoch": 0.34012471239454467,
"grad_norm": 4.465322393758394,
"learning_rate": 2.2494500744928583e-05,
"loss": 0.706,
"step": 2550
},
{
"epoch": 0.3414585347960919,
"grad_norm": 2.5329140738676714,
"learning_rate": 2.243943450473963e-05,
"loss": 0.6652,
"step": 2560
},
{
"epoch": 0.3427923571976391,
"grad_norm": 2.6213955428320963,
"learning_rate": 2.2384234974687658e-05,
"loss": 0.7123,
"step": 2570
},
{
"epoch": 0.34412617959918634,
"grad_norm": 2.8450668136715827,
"learning_rate": 2.2328903143764216e-05,
"loss": 0.6748,
"step": 2580
},
{
"epoch": 0.3454600020007336,
"grad_norm": 9.246863580911334,
"learning_rate": 2.2273440003331237e-05,
"loss": 0.6774,
"step": 2590
},
{
"epoch": 0.34679382440228085,
"grad_norm": 2.610989556515575,
"learning_rate": 2.2217846547103275e-05,
"loss": 0.7042,
"step": 2600
},
{
"epoch": 0.3481276468038281,
"grad_norm": 7.325969061692186,
"learning_rate": 2.216212377112972e-05,
"loss": 0.6834,
"step": 2610
},
{
"epoch": 0.3494614692053753,
"grad_norm": 3.001379331751721,
"learning_rate": 2.2106272673776934e-05,
"loss": 0.7033,
"step": 2620
},
{
"epoch": 0.3507952916069225,
"grad_norm": 3.463073346975308,
"learning_rate": 2.2050294255710375e-05,
"loss": 0.6839,
"step": 2630
},
{
"epoch": 0.35212911400846975,
"grad_norm": 3.524564101951424,
"learning_rate": 2.1994189519876663e-05,
"loss": 0.6948,
"step": 2640
},
{
"epoch": 0.353462936410017,
"grad_norm": 3.152341329769827,
"learning_rate": 2.19379594714856e-05,
"loss": 0.6767,
"step": 2650
},
{
"epoch": 0.35479675881156425,
"grad_norm": 4.2343916663936305,
"learning_rate": 2.188160511799219e-05,
"loss": 0.6755,
"step": 2660
},
{
"epoch": 0.3561305812131115,
"grad_norm": 2.7909676165285813,
"learning_rate": 2.1825127469078555e-05,
"loss": 0.6694,
"step": 2670
},
{
"epoch": 0.3574644036146587,
"grad_norm": 1.8765416483232782,
"learning_rate": 2.1768527536635868e-05,
"loss": 0.7031,
"step": 2680
},
{
"epoch": 0.3587982260162059,
"grad_norm": 13.262978009985517,
"learning_rate": 2.171180633474621e-05,
"loss": 0.7371,
"step": 2690
},
{
"epoch": 0.3601320484177532,
"grad_norm": 3.886717400478723,
"learning_rate": 2.1654964879664407e-05,
"loss": 0.7109,
"step": 2700
},
{
"epoch": 0.3614658708193004,
"grad_norm": 2.040560351248799,
"learning_rate": 2.1598004189799826e-05,
"loss": 0.7274,
"step": 2710
},
{
"epoch": 0.36279969322084765,
"grad_norm": 24.610089275348535,
"learning_rate": 2.1540925285698122e-05,
"loss": 0.6886,
"step": 2720
},
{
"epoch": 0.3641335156223949,
"grad_norm": 3.6439264742220216,
"learning_rate": 2.148372919002295e-05,
"loss": 0.681,
"step": 2730
},
{
"epoch": 0.3654673380239421,
"grad_norm": 5.83580774778366,
"learning_rate": 2.142641692753765e-05,
"loss": 0.6502,
"step": 2740
},
{
"epoch": 0.3668011604254893,
"grad_norm": 1.8530940550203352,
"learning_rate": 2.1368989525086893e-05,
"loss": 0.6854,
"step": 2750
},
{
"epoch": 0.3681349828270366,
"grad_norm": 5.003536499561226,
"learning_rate": 2.1311448011578255e-05,
"loss": 0.6699,
"step": 2760
},
{
"epoch": 0.36946880522858383,
"grad_norm": 2.6889933495770912,
"learning_rate": 2.125379341796382e-05,
"loss": 0.741,
"step": 2770
},
{
"epoch": 0.37080262763013105,
"grad_norm": 2.0672372686575575,
"learning_rate": 2.1196026777221684e-05,
"loss": 0.693,
"step": 2780
},
{
"epoch": 0.3721364500316783,
"grad_norm": 3.023122371840424,
"learning_rate": 2.1138149124337448e-05,
"loss": 0.7227,
"step": 2790
},
{
"epoch": 0.3734702724332255,
"grad_norm": 5.98908480573641,
"learning_rate": 2.108016149628569e-05,
"loss": 0.6875,
"step": 2800
},
{
"epoch": 0.3748040948347727,
"grad_norm": 13.324804502845906,
"learning_rate": 2.102206493201137e-05,
"loss": 0.6693,
"step": 2810
},
{
"epoch": 0.37613791723632,
"grad_norm": 2.877158805709884,
"learning_rate": 2.096386047241123e-05,
"loss": 0.6752,
"step": 2820
},
{
"epoch": 0.37747173963786723,
"grad_norm": 3.417018003930411,
"learning_rate": 2.0905549160315116e-05,
"loss": 0.6874,
"step": 2830
},
{
"epoch": 0.37880556203941446,
"grad_norm": 6.197947611584602,
"learning_rate": 2.084713204046734e-05,
"loss": 0.6995,
"step": 2840
},
{
"epoch": 0.3801393844409617,
"grad_norm": 2.4400537269180327,
"learning_rate": 2.078861015950793e-05,
"loss": 0.718,
"step": 2850
},
{
"epoch": 0.3814732068425089,
"grad_norm": 3.4313321352162878,
"learning_rate": 2.072998456595387e-05,
"loss": 0.6928,
"step": 2860
},
{
"epoch": 0.38280702924405613,
"grad_norm": 3.323108743280233,
"learning_rate": 2.0671256310180334e-05,
"loss": 0.7141,
"step": 2870
},
{
"epoch": 0.3841408516456034,
"grad_norm": 2.270407423855968,
"learning_rate": 2.0612426444401874e-05,
"loss": 0.6677,
"step": 2880
},
{
"epoch": 0.38547467404715063,
"grad_norm": 4.473087793045971,
"learning_rate": 2.0553496022653535e-05,
"loss": 0.706,
"step": 2890
},
{
"epoch": 0.38680849644869786,
"grad_norm": 4.498504602131192,
"learning_rate": 2.0494466100772006e-05,
"loss": 0.6783,
"step": 2900
},
{
"epoch": 0.3881423188502451,
"grad_norm": 1.8721168603816298,
"learning_rate": 2.0435337736376677e-05,
"loss": 0.7327,
"step": 2910
},
{
"epoch": 0.3894761412517923,
"grad_norm": 2.1819398242824093,
"learning_rate": 2.03761119888507e-05,
"loss": 0.6798,
"step": 2920
},
{
"epoch": 0.39080996365333953,
"grad_norm": 29.747303047069977,
"learning_rate": 2.031678991932201e-05,
"loss": 0.7045,
"step": 2930
},
{
"epoch": 0.3921437860548868,
"grad_norm": 4.708328967247123,
"learning_rate": 2.0257372590644314e-05,
"loss": 0.6896,
"step": 2940
},
{
"epoch": 0.39347760845643404,
"grad_norm": 2.873510721340991,
"learning_rate": 2.0197861067378044e-05,
"loss": 0.6802,
"step": 2950
},
{
"epoch": 0.39481143085798126,
"grad_norm": 4.540574995423212,
"learning_rate": 2.0138256415771275e-05,
"loss": 0.6219,
"step": 2960
},
{
"epoch": 0.3961452532595285,
"grad_norm": 11.817372765224325,
"learning_rate": 2.0078559703740654e-05,
"loss": 0.65,
"step": 2970
},
{
"epoch": 0.3974790756610757,
"grad_norm": 11.004144754692504,
"learning_rate": 2.0018772000852216e-05,
"loss": 0.7056,
"step": 2980
},
{
"epoch": 0.398812898062623,
"grad_norm": 1.7365475356133573,
"learning_rate": 1.9958894378302265e-05,
"loss": 0.6827,
"step": 2990
},
{
"epoch": 0.4001467204641702,
"grad_norm": 4.31426545646336,
"learning_rate": 1.989892790889817e-05,
"loss": 0.6796,
"step": 3000
},
{
"epoch": 0.40148054286571744,
"grad_norm": 2.534413468413497,
"learning_rate": 1.9838873667039134e-05,
"loss": 0.6825,
"step": 3010
},
{
"epoch": 0.40281436526726466,
"grad_norm": 2.5821079814088,
"learning_rate": 1.9778732728696937e-05,
"loss": 0.6522,
"step": 3020
},
{
"epoch": 0.4041481876688119,
"grad_norm": 10.45675108188373,
"learning_rate": 1.9718506171396694e-05,
"loss": 0.6752,
"step": 3030
},
{
"epoch": 0.4054820100703591,
"grad_norm": 10.969680268488736,
"learning_rate": 1.965819507419751e-05,
"loss": 0.7195,
"step": 3040
},
{
"epoch": 0.4068158324719064,
"grad_norm": 9.540053007670354,
"learning_rate": 1.9597800517673165e-05,
"loss": 0.6762,
"step": 3050
},
{
"epoch": 0.4081496548734536,
"grad_norm": 8.551702443669248,
"learning_rate": 1.9537323583892753e-05,
"loss": 0.7292,
"step": 3060
},
{
"epoch": 0.40948347727500084,
"grad_norm": 3.0994689178852903,
"learning_rate": 1.9476765356401304e-05,
"loss": 0.6764,
"step": 3070
},
{
"epoch": 0.41081729967654806,
"grad_norm": 3.1013298812228163,
"learning_rate": 1.9416126920200344e-05,
"loss": 0.6484,
"step": 3080
},
{
"epoch": 0.4121511220780953,
"grad_norm": 2.00628497131861,
"learning_rate": 1.9355409361728482e-05,
"loss": 0.7094,
"step": 3090
},
{
"epoch": 0.4134849444796425,
"grad_norm": 5.224082004633703,
"learning_rate": 1.9294613768841932e-05,
"loss": 0.7279,
"step": 3100
},
{
"epoch": 0.4148187668811898,
"grad_norm": 18.62631978728915,
"learning_rate": 1.9233741230795022e-05,
"loss": 0.662,
"step": 3110
},
{
"epoch": 0.416152589282737,
"grad_norm": 3.6495526914982968,
"learning_rate": 1.9172792838220686e-05,
"loss": 0.6836,
"step": 3120
},
{
"epoch": 0.41748641168428424,
"grad_norm": 2.304337917905853,
"learning_rate": 1.9111769683110914e-05,
"loss": 0.6901,
"step": 3130
},
{
"epoch": 0.41882023408583147,
"grad_norm": 8.427846401703292,
"learning_rate": 1.905067285879719e-05,
"loss": 0.6606,
"step": 3140
},
{
"epoch": 0.4201540564873787,
"grad_norm": 2.2306668115119104,
"learning_rate": 1.8989503459930908e-05,
"loss": 0.7434,
"step": 3150
},
{
"epoch": 0.4214878788889259,
"grad_norm": 2.231586663842237,
"learning_rate": 1.892826258246376e-05,
"loss": 0.7184,
"step": 3160
},
{
"epoch": 0.4228217012904732,
"grad_norm": 5.804571835994344,
"learning_rate": 1.886695132362808e-05,
"loss": 0.7073,
"step": 3170
},
{
"epoch": 0.4241555236920204,
"grad_norm": 4.7472512172058785,
"learning_rate": 1.8805570781917228e-05,
"loss": 0.7102,
"step": 3180
},
{
"epoch": 0.42548934609356764,
"grad_norm": 1.723627694530291,
"learning_rate": 1.8744122057065856e-05,
"loss": 0.6828,
"step": 3190
},
{
"epoch": 0.42682316849511487,
"grad_norm": 1.9952068710149184,
"learning_rate": 1.868260625003024e-05,
"loss": 0.6545,
"step": 3200
},
{
"epoch": 0.4281569908966621,
"grad_norm": 4.588444559005735,
"learning_rate": 1.8621024462968553e-05,
"loss": 0.67,
"step": 3210
},
{
"epoch": 0.4294908132982093,
"grad_norm": 2.155634253115107,
"learning_rate": 1.85593777992211e-05,
"loss": 0.7173,
"step": 3220
},
{
"epoch": 0.4308246356997566,
"grad_norm": 3.3412948579128194,
"learning_rate": 1.849766736329056e-05,
"loss": 0.6364,
"step": 3230
},
{
"epoch": 0.4321584581013038,
"grad_norm": 2.1344417176214607,
"learning_rate": 1.8435894260822208e-05,
"loss": 0.6919,
"step": 3240
},
{
"epoch": 0.43349228050285105,
"grad_norm": 3.8410669902748764,
"learning_rate": 1.8374059598584084e-05,
"loss": 0.6524,
"step": 3250
},
{
"epoch": 0.43482610290439827,
"grad_norm": 2.609728029777106,
"learning_rate": 1.831216448444717e-05,
"loss": 0.688,
"step": 3260
},
{
"epoch": 0.4361599253059455,
"grad_norm": 2.182084710285402,
"learning_rate": 1.8250210027365562e-05,
"loss": 0.7327,
"step": 3270
},
{
"epoch": 0.4374937477074928,
"grad_norm": 1.0672619638672702,
"learning_rate": 1.818819733735657e-05,
"loss": 0.7137,
"step": 3280
},
{
"epoch": 0.43882757010904,
"grad_norm": 1.7248236414002174,
"learning_rate": 1.812612752548084e-05,
"loss": 0.6848,
"step": 3290
},
{
"epoch": 0.4401613925105872,
"grad_norm": 2.717100059326369,
"learning_rate": 1.806400170382246e-05,
"loss": 0.6582,
"step": 3300
},
{
"epoch": 0.44149521491213445,
"grad_norm": 2.7420980324781348,
"learning_rate": 1.8001820985469026e-05,
"loss": 0.6976,
"step": 3310
},
{
"epoch": 0.4428290373136817,
"grad_norm": 3.9917362204420357,
"learning_rate": 1.7939586484491704e-05,
"loss": 0.7259,
"step": 3320
},
{
"epoch": 0.4441628597152289,
"grad_norm": 3.2371945093430514,
"learning_rate": 1.787729931592525e-05,
"loss": 0.6883,
"step": 3330
},
{
"epoch": 0.4454966821167762,
"grad_norm": 2.439245137250377,
"learning_rate": 1.781496059574807e-05,
"loss": 0.6876,
"step": 3340
},
{
"epoch": 0.4468305045183234,
"grad_norm": 4.525984025887397,
"learning_rate": 1.7752571440862178e-05,
"loss": 0.6724,
"step": 3350
},
{
"epoch": 0.4481643269198706,
"grad_norm": 2.3388903272276518,
"learning_rate": 1.7690132969073223e-05,
"loss": 0.7065,
"step": 3360
},
{
"epoch": 0.44949814932141785,
"grad_norm": 6.946538587379132,
"learning_rate": 1.7627646299070457e-05,
"loss": 0.6444,
"step": 3370
},
{
"epoch": 0.4508319717229651,
"grad_norm": 1.5334789635428385,
"learning_rate": 1.7565112550406663e-05,
"loss": 0.6597,
"step": 3380
},
{
"epoch": 0.4521657941245123,
"grad_norm": 1.7438745925855814,
"learning_rate": 1.7502532843478134e-05,
"loss": 0.736,
"step": 3390
},
{
"epoch": 0.4534996165260596,
"grad_norm": 2.352884928297456,
"learning_rate": 1.743990829950458e-05,
"loss": 0.7209,
"step": 3400
},
{
"epoch": 0.4548334389276068,
"grad_norm": 2.589791551987411,
"learning_rate": 1.737724004050903e-05,
"loss": 0.6873,
"step": 3410
},
{
"epoch": 0.45616726132915403,
"grad_norm": 1.5018800238986845,
"learning_rate": 1.731452918929774e-05,
"loss": 0.6993,
"step": 3420
},
{
"epoch": 0.45750108373070125,
"grad_norm": 1.618737845945941,
"learning_rate": 1.7251776869440097e-05,
"loss": 0.719,
"step": 3430
},
{
"epoch": 0.4588349061322485,
"grad_norm": 4.764891120811521,
"learning_rate": 1.718898420524845e-05,
"loss": 0.7066,
"step": 3440
},
{
"epoch": 0.4601687285337957,
"grad_norm": 30.008073864717016,
"learning_rate": 1.7126152321757985e-05,
"loss": 0.7234,
"step": 3450
},
{
"epoch": 0.461502550935343,
"grad_norm": 4.718402571866902,
"learning_rate": 1.7063282344706577e-05,
"loss": 0.671,
"step": 3460
},
{
"epoch": 0.4628363733368902,
"grad_norm": 3.279168331496427,
"learning_rate": 1.7000375400514602e-05,
"loss": 0.6748,
"step": 3470
},
{
"epoch": 0.46417019573843743,
"grad_norm": 4.202866783860852,
"learning_rate": 1.693743261626476e-05,
"loss": 0.7135,
"step": 3480
},
{
"epoch": 0.46550401813998465,
"grad_norm": 2.959211747400748,
"learning_rate": 1.68744551196819e-05,
"loss": 0.6684,
"step": 3490
},
{
"epoch": 0.4668378405415319,
"grad_norm": 3.7208053935256085,
"learning_rate": 1.6811444039112787e-05,
"loss": 0.6842,
"step": 3500
},
{
"epoch": 0.4681716629430791,
"grad_norm": 1.8411337183473255,
"learning_rate": 1.6748400503505905e-05,
"loss": 0.6796,
"step": 3510
},
{
"epoch": 0.4695054853446264,
"grad_norm": 1.5569024338481647,
"learning_rate": 1.6685325642391223e-05,
"loss": 0.7357,
"step": 3520
},
{
"epoch": 0.4708393077461736,
"grad_norm": 2.30459532472586,
"learning_rate": 1.662222058585996e-05,
"loss": 0.6825,
"step": 3530
},
{
"epoch": 0.47217313014772083,
"grad_norm": 1.6593076444414934,
"learning_rate": 1.6559086464544334e-05,
"loss": 0.7067,
"step": 3540
},
{
"epoch": 0.47350695254926806,
"grad_norm": 2.6738168898709356,
"learning_rate": 1.6495924409597305e-05,
"loss": 0.665,
"step": 3550
},
{
"epoch": 0.4748407749508153,
"grad_norm": 10.974918207024547,
"learning_rate": 1.6432735552672317e-05,
"loss": 0.705,
"step": 3560
},
{
"epoch": 0.4761745973523625,
"grad_norm": 4.279092732465272,
"learning_rate": 1.636952102590301e-05,
"loss": 0.6858,
"step": 3570
},
{
"epoch": 0.4775084197539098,
"grad_norm": 8.958608602390235,
"learning_rate": 1.630628196188295e-05,
"loss": 0.7022,
"step": 3580
},
{
"epoch": 0.478842242155457,
"grad_norm": 1.2316277268276075,
"learning_rate": 1.6243019493645315e-05,
"loss": 0.7091,
"step": 3590
},
{
"epoch": 0.48017606455700423,
"grad_norm": 1.6977852924595596,
"learning_rate": 1.617973475464262e-05,
"loss": 0.6725,
"step": 3600
},
{
"epoch": 0.48150988695855146,
"grad_norm": 9.102696583046576,
"learning_rate": 1.6116428878726396e-05,
"loss": 0.706,
"step": 3610
},
{
"epoch": 0.4828437093600987,
"grad_norm": 2.983654314671525,
"learning_rate": 1.6053103000126874e-05,
"loss": 0.6663,
"step": 3620
},
{
"epoch": 0.48417753176164596,
"grad_norm": 2.9273555172026304,
"learning_rate": 1.598975825343267e-05,
"loss": 0.6986,
"step": 3630
},
{
"epoch": 0.4855113541631932,
"grad_norm": 2.4687475856334613,
"learning_rate": 1.5926395773570447e-05,
"loss": 0.7192,
"step": 3640
},
{
"epoch": 0.4868451765647404,
"grad_norm": 4.171039626246759,
"learning_rate": 1.5863016695784604e-05,
"loss": 0.6702,
"step": 3650
},
{
"epoch": 0.48817899896628764,
"grad_norm": 3.8655482044779337,
"learning_rate": 1.5799622155616887e-05,
"loss": 0.6568,
"step": 3660
},
{
"epoch": 0.48951282136783486,
"grad_norm": 2.8245022157946362,
"learning_rate": 1.5736213288886112e-05,
"loss": 0.7075,
"step": 3670
},
{
"epoch": 0.4908466437693821,
"grad_norm": 2.1969432272158556,
"learning_rate": 1.567279123166776e-05,
"loss": 0.7043,
"step": 3680
},
{
"epoch": 0.49218046617092936,
"grad_norm": 3.7154807458182835,
"learning_rate": 1.560935712027364e-05,
"loss": 0.6467,
"step": 3690
},
{
"epoch": 0.4935142885724766,
"grad_norm": 4.060155573527941,
"learning_rate": 1.5545912091231543e-05,
"loss": 0.6957,
"step": 3700
},
{
"epoch": 0.4948481109740238,
"grad_norm": 2.057087008440973,
"learning_rate": 1.548245728126486e-05,
"loss": 0.6656,
"step": 3710
},
{
"epoch": 0.49618193337557104,
"grad_norm": 1.975534767472513,
"learning_rate": 1.5418993827272224e-05,
"loss": 0.6867,
"step": 3720
},
{
"epoch": 0.49751575577711826,
"grad_norm": 11.237169875747464,
"learning_rate": 1.5355522866307144e-05,
"loss": 0.693,
"step": 3730
},
{
"epoch": 0.4988495781786655,
"grad_norm": 2.7505125088389066,
"learning_rate": 1.529204553555762e-05,
"loss": 0.6715,
"step": 3740
},
{
"epoch": 0.5001834005802127,
"grad_norm": 14.47964311360144,
"learning_rate": 1.522856297232579e-05,
"loss": 0.6638,
"step": 3750
},
{
"epoch": 0.5015172229817599,
"grad_norm": 1.4576903787797197,
"learning_rate": 1.5165076314007529e-05,
"loss": 0.6461,
"step": 3760
},
{
"epoch": 0.5028510453833072,
"grad_norm": 4.190097060433623,
"learning_rate": 1.5101586698072095e-05,
"loss": 0.6997,
"step": 3770
},
{
"epoch": 0.5041848677848545,
"grad_norm": 2.6358802196743887,
"learning_rate": 1.5038095262041725e-05,
"loss": 0.6805,
"step": 3780
},
{
"epoch": 0.5055186901864017,
"grad_norm": 2.9885793100944484,
"learning_rate": 1.4974603143471268e-05,
"loss": 0.663,
"step": 3790
},
{
"epoch": 0.506852512587949,
"grad_norm": 3.364287860442736,
"learning_rate": 1.4911111479927804e-05,
"loss": 0.6851,
"step": 3800
},
{
"epoch": 0.5081863349894962,
"grad_norm": 6.415730527817265,
"learning_rate": 1.4847621408970266e-05,
"loss": 0.6544,
"step": 3810
},
{
"epoch": 0.5095201573910434,
"grad_norm": 1.6327349630681778,
"learning_rate": 1.4784134068129043e-05,
"loss": 0.6629,
"step": 3820
},
{
"epoch": 0.5108539797925906,
"grad_norm": 3.0622996050606783,
"learning_rate": 1.4720650594885614e-05,
"loss": 0.6651,
"step": 3830
},
{
"epoch": 0.5121878021941378,
"grad_norm": 5.445942430441996,
"learning_rate": 1.4657172126652167e-05,
"loss": 0.664,
"step": 3840
},
{
"epoch": 0.5135216245956851,
"grad_norm": 4.518334654823446,
"learning_rate": 1.459369980075121e-05,
"loss": 0.6959,
"step": 3850
},
{
"epoch": 0.5148554469972323,
"grad_norm": 1.8471627413065406,
"learning_rate": 1.4530234754395207e-05,
"loss": 0.6774,
"step": 3860
},
{
"epoch": 0.5161892693987795,
"grad_norm": 3.6484122755334525,
"learning_rate": 1.4466778124666192e-05,
"loss": 0.6825,
"step": 3870
},
{
"epoch": 0.5175230918003267,
"grad_norm": 2.087118207544068,
"learning_rate": 1.4403331048495404e-05,
"loss": 0.6985,
"step": 3880
},
{
"epoch": 0.5188569142018741,
"grad_norm": 11.878313425481934,
"learning_rate": 1.4339894662642914e-05,
"loss": 0.6764,
"step": 3890
},
{
"epoch": 0.5201907366034213,
"grad_norm": 2.5453717997032115,
"learning_rate": 1.4276470103677257e-05,
"loss": 0.7091,
"step": 3900
},
{
"epoch": 0.5215245590049685,
"grad_norm": 4.791248513372535,
"learning_rate": 1.4213058507955072e-05,
"loss": 0.644,
"step": 3910
},
{
"epoch": 0.5228583814065157,
"grad_norm": 2.1955258954683545,
"learning_rate": 1.4149661011600734e-05,
"loss": 0.6954,
"step": 3920
},
{
"epoch": 0.524192203808063,
"grad_norm": 3.5143987933185676,
"learning_rate": 1.4086278750486017e-05,
"loss": 0.6848,
"step": 3930
},
{
"epoch": 0.5255260262096102,
"grad_norm": 3.168504700204386,
"learning_rate": 1.4022912860209709e-05,
"loss": 0.6752,
"step": 3940
},
{
"epoch": 0.5268598486111574,
"grad_norm": 1.9655682723891459,
"learning_rate": 1.3959564476077308e-05,
"loss": 0.6904,
"step": 3950
},
{
"epoch": 0.5281936710127046,
"grad_norm": 1.6897897373972772,
"learning_rate": 1.389623473308065e-05,
"loss": 0.6929,
"step": 3960
},
{
"epoch": 0.5295274934142519,
"grad_norm": 4.400154605229998,
"learning_rate": 1.3832924765877587e-05,
"loss": 0.726,
"step": 3970
},
{
"epoch": 0.5308613158157991,
"grad_norm": 2.790842978581456,
"learning_rate": 1.3769635708771654e-05,
"loss": 0.6724,
"step": 3980
},
{
"epoch": 0.5321951382173463,
"grad_norm": 1.5712798066752716,
"learning_rate": 1.3706368695691745e-05,
"loss": 0.6703,
"step": 3990
},
{
"epoch": 0.5335289606188935,
"grad_norm": 5.340886291219129,
"learning_rate": 1.3643124860171801e-05,
"loss": 0.6595,
"step": 4000
},
{
"epoch": 0.5348627830204409,
"grad_norm": 1.985940330857511,
"learning_rate": 1.35799053353305e-05,
"loss": 0.6892,
"step": 4010
},
{
"epoch": 0.5361966054219881,
"grad_norm": 3.917331449757074,
"learning_rate": 1.3516711253850949e-05,
"loss": 0.6417,
"step": 4020
},
{
"epoch": 0.5375304278235353,
"grad_norm": 1.66962823795828,
"learning_rate": 1.3453543747960393e-05,
"loss": 0.6784,
"step": 4030
},
{
"epoch": 0.5388642502250826,
"grad_norm": 4.181035760200595,
"learning_rate": 1.3390403949409943e-05,
"loss": 0.7115,
"step": 4040
},
{
"epoch": 0.5401980726266298,
"grad_norm": 2.4193575665243214,
"learning_rate": 1.3327292989454273e-05,
"loss": 0.7104,
"step": 4050
},
{
"epoch": 0.541531895028177,
"grad_norm": 2.0442192962046275,
"learning_rate": 1.3264211998831374e-05,
"loss": 0.7008,
"step": 4060
},
{
"epoch": 0.5428657174297242,
"grad_norm": 3.0689852808863183,
"learning_rate": 1.3201162107742285e-05,
"loss": 0.677,
"step": 4070
},
{
"epoch": 0.5441995398312715,
"grad_norm": 2.22632841251654,
"learning_rate": 1.3138144445830841e-05,
"loss": 0.6223,
"step": 4080
},
{
"epoch": 0.5455333622328187,
"grad_norm": 8.813265719863766,
"learning_rate": 1.3075160142163442e-05,
"loss": 0.6791,
"step": 4090
},
{
"epoch": 0.5468671846343659,
"grad_norm": 2.461550778463616,
"learning_rate": 1.3012210325208818e-05,
"loss": 0.7165,
"step": 4100
},
{
"epoch": 0.5482010070359131,
"grad_norm": 2.1304508310591896,
"learning_rate": 1.2949296122817813e-05,
"loss": 0.6905,
"step": 4110
},
{
"epoch": 0.5495348294374603,
"grad_norm": 2.1733622775851535,
"learning_rate": 1.2886418662203174e-05,
"loss": 0.6963,
"step": 4120
},
{
"epoch": 0.5508686518390077,
"grad_norm": 2.654530675610581,
"learning_rate": 1.282357906991936e-05,
"loss": 0.6796,
"step": 4130
},
{
"epoch": 0.5522024742405549,
"grad_norm": 2.6976858995246085,
"learning_rate": 1.276077847184236e-05,
"loss": 0.6922,
"step": 4140
},
{
"epoch": 0.5535362966421021,
"grad_norm": 2.5591371381474857,
"learning_rate": 1.2698017993149504e-05,
"loss": 0.7047,
"step": 4150
},
{
"epoch": 0.5548701190436494,
"grad_norm": 6.439964637422321,
"learning_rate": 1.2635298758299336e-05,
"loss": 0.6722,
"step": 4160
},
{
"epoch": 0.5562039414451966,
"grad_norm": 1.6222259612163727,
"learning_rate": 1.2572621891011426e-05,
"loss": 0.6646,
"step": 4170
},
{
"epoch": 0.5575377638467438,
"grad_norm": 3.410425968580818,
"learning_rate": 1.2509988514246272e-05,
"loss": 0.6894,
"step": 4180
},
{
"epoch": 0.558871586248291,
"grad_norm": 2.7111542804682327,
"learning_rate": 1.2447399750185166e-05,
"loss": 0.7196,
"step": 4190
},
{
"epoch": 0.5602054086498383,
"grad_norm": 3.3657872237953868,
"learning_rate": 1.2384856720210086e-05,
"loss": 0.7052,
"step": 4200
},
{
"epoch": 0.5615392310513855,
"grad_norm": 3.4383001609998143,
"learning_rate": 1.2322360544883608e-05,
"loss": 0.664,
"step": 4210
},
{
"epoch": 0.5628730534529327,
"grad_norm": 4.31412552867304,
"learning_rate": 1.2259912343928831e-05,
"loss": 0.6923,
"step": 4220
},
{
"epoch": 0.5642068758544799,
"grad_norm": 2.9738159323747655,
"learning_rate": 1.2197513236209312e-05,
"loss": 0.6787,
"step": 4230
},
{
"epoch": 0.5655406982560273,
"grad_norm": 14.42279175461777,
"learning_rate": 1.213516433970902e-05,
"loss": 0.7313,
"step": 4240
},
{
"epoch": 0.5668745206575745,
"grad_norm": 2.6156276324588195,
"learning_rate": 1.2072866771512306e-05,
"loss": 0.6856,
"step": 4250
},
{
"epoch": 0.5682083430591217,
"grad_norm": 2.692794641012978,
"learning_rate": 1.201062164778389e-05,
"loss": 0.6587,
"step": 4260
},
{
"epoch": 0.5695421654606689,
"grad_norm": 3.01896569407463,
"learning_rate": 1.1948430083748864e-05,
"loss": 0.7225,
"step": 4270
},
{
"epoch": 0.5708759878622162,
"grad_norm": 2.266424840293995,
"learning_rate": 1.1886293193672707e-05,
"loss": 0.6847,
"step": 4280
},
{
"epoch": 0.5722098102637634,
"grad_norm": 2.2789387948762987,
"learning_rate": 1.1824212090841321e-05,
"loss": 0.7011,
"step": 4290
},
{
"epoch": 0.5735436326653106,
"grad_norm": 2.826447974943076,
"learning_rate": 1.1762187887541088e-05,
"loss": 0.689,
"step": 4300
},
{
"epoch": 0.5748774550668578,
"grad_norm": 2.565293440960005,
"learning_rate": 1.1700221695038944e-05,
"loss": 0.7077,
"step": 4310
},
{
"epoch": 0.5762112774684051,
"grad_norm": 4.459154190124916,
"learning_rate": 1.1638314623562459e-05,
"loss": 0.6885,
"step": 4320
},
{
"epoch": 0.5775450998699523,
"grad_norm": 1.8187338733285852,
"learning_rate": 1.1576467782279953e-05,
"loss": 0.7103,
"step": 4330
},
{
"epoch": 0.5788789222714995,
"grad_norm": 4.078050868504266,
"learning_rate": 1.1514682279280621e-05,
"loss": 0.6742,
"step": 4340
},
{
"epoch": 0.5802127446730467,
"grad_norm": 2.4612673583806233,
"learning_rate": 1.1452959221554684e-05,
"loss": 0.6941,
"step": 4350
},
{
"epoch": 0.5815465670745941,
"grad_norm": 8.05059787591381,
"learning_rate": 1.1391299714973553e-05,
"loss": 0.7072,
"step": 4360
},
{
"epoch": 0.5828803894761413,
"grad_norm": 5.041675641180621,
"learning_rate": 1.1329704864270005e-05,
"loss": 0.6914,
"step": 4370
},
{
"epoch": 0.5842142118776885,
"grad_norm": 3.8176735967050672,
"learning_rate": 1.1268175773018409e-05,
"loss": 0.6489,
"step": 4380
},
{
"epoch": 0.5855480342792357,
"grad_norm": 2.068471874891413,
"learning_rate": 1.1206713543614942e-05,
"loss": 0.7182,
"step": 4390
},
{
"epoch": 0.586881856680783,
"grad_norm": 4.7154770167485065,
"learning_rate": 1.1145319277257834e-05,
"loss": 0.6961,
"step": 4400
}
],
"logging_steps": 10,
"max_steps": 7497,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 400,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.2027710597077402e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}