{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1682,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0005945303210463733,
"grad_norm": 23.17915866800298,
"learning_rate": 5.91715976331361e-08,
"loss": 1.3012,
"step": 1
},
{
"epoch": 0.002972651605231867,
"grad_norm": 23.37449539925455,
"learning_rate": 2.958579881656805e-07,
"loss": 1.3439,
"step": 5
},
{
"epoch": 0.005945303210463734,
"grad_norm": 10.391844977150628,
"learning_rate": 5.91715976331361e-07,
"loss": 1.2902,
"step": 10
},
{
"epoch": 0.0089179548156956,
"grad_norm": 12.298658554924508,
"learning_rate": 8.875739644970415e-07,
"loss": 1.1528,
"step": 15
},
{
"epoch": 0.011890606420927468,
"grad_norm": 5.836227197737131,
"learning_rate": 1.183431952662722e-06,
"loss": 1.0462,
"step": 20
},
{
"epoch": 0.014863258026159334,
"grad_norm": 3.59897554922516,
"learning_rate": 1.4792899408284026e-06,
"loss": 0.9416,
"step": 25
},
{
"epoch": 0.0178359096313912,
"grad_norm": 3.2741630752466273,
"learning_rate": 1.775147928994083e-06,
"loss": 0.9146,
"step": 30
},
{
"epoch": 0.020808561236623068,
"grad_norm": 2.905060653147668,
"learning_rate": 2.0710059171597635e-06,
"loss": 0.8965,
"step": 35
},
{
"epoch": 0.023781212841854936,
"grad_norm": 2.9848388765396066,
"learning_rate": 2.366863905325444e-06,
"loss": 0.8532,
"step": 40
},
{
"epoch": 0.0267538644470868,
"grad_norm": 2.928858994497014,
"learning_rate": 2.6627218934911246e-06,
"loss": 0.8607,
"step": 45
},
{
"epoch": 0.029726516052318668,
"grad_norm": 2.9699519304565105,
"learning_rate": 2.958579881656805e-06,
"loss": 0.8293,
"step": 50
},
{
"epoch": 0.032699167657550536,
"grad_norm": 2.9924496822372846,
"learning_rate": 3.2544378698224853e-06,
"loss": 0.8376,
"step": 55
},
{
"epoch": 0.0356718192627824,
"grad_norm": 3.0189144878653127,
"learning_rate": 3.550295857988166e-06,
"loss": 0.8223,
"step": 60
},
{
"epoch": 0.03864447086801427,
"grad_norm": 3.022152891252942,
"learning_rate": 3.846153846153847e-06,
"loss": 0.808,
"step": 65
},
{
"epoch": 0.041617122473246136,
"grad_norm": 3.0781659112469577,
"learning_rate": 4.142011834319527e-06,
"loss": 0.7936,
"step": 70
},
{
"epoch": 0.044589774078478,
"grad_norm": 3.084476562677424,
"learning_rate": 4.437869822485207e-06,
"loss": 0.7752,
"step": 75
},
{
"epoch": 0.04756242568370987,
"grad_norm": 3.2098986890021237,
"learning_rate": 4.733727810650888e-06,
"loss": 0.7763,
"step": 80
},
{
"epoch": 0.050535077288941736,
"grad_norm": 3.19384567951823,
"learning_rate": 5.029585798816569e-06,
"loss": 0.7746,
"step": 85
},
{
"epoch": 0.0535077288941736,
"grad_norm": 2.964927416048633,
"learning_rate": 5.325443786982249e-06,
"loss": 0.7682,
"step": 90
},
{
"epoch": 0.05648038049940547,
"grad_norm": 3.1019748122364277,
"learning_rate": 5.621301775147929e-06,
"loss": 0.7518,
"step": 95
},
{
"epoch": 0.059453032104637336,
"grad_norm": 3.2201430089223386,
"learning_rate": 5.91715976331361e-06,
"loss": 0.7436,
"step": 100
},
{
"epoch": 0.0624256837098692,
"grad_norm": 2.927109670294165,
"learning_rate": 6.21301775147929e-06,
"loss": 0.7426,
"step": 105
},
{
"epoch": 0.06539833531510107,
"grad_norm": 2.8885844881352742,
"learning_rate": 6.5088757396449705e-06,
"loss": 0.74,
"step": 110
},
{
"epoch": 0.06837098692033294,
"grad_norm": 3.172326235032539,
"learning_rate": 6.8047337278106515e-06,
"loss": 0.7647,
"step": 115
},
{
"epoch": 0.0713436385255648,
"grad_norm": 2.8788604196009047,
"learning_rate": 7.100591715976332e-06,
"loss": 0.7341,
"step": 120
},
{
"epoch": 0.07431629013079667,
"grad_norm": 2.7082199945019956,
"learning_rate": 7.396449704142013e-06,
"loss": 0.7347,
"step": 125
},
{
"epoch": 0.07728894173602854,
"grad_norm": 2.877616745493528,
"learning_rate": 7.692307692307694e-06,
"loss": 0.7266,
"step": 130
},
{
"epoch": 0.0802615933412604,
"grad_norm": 2.873705604654048,
"learning_rate": 7.988165680473373e-06,
"loss": 0.7461,
"step": 135
},
{
"epoch": 0.08323424494649227,
"grad_norm": 3.188866051330491,
"learning_rate": 8.284023668639054e-06,
"loss": 0.7267,
"step": 140
},
{
"epoch": 0.08620689655172414,
"grad_norm": 2.7709923287552787,
"learning_rate": 8.579881656804735e-06,
"loss": 0.7183,
"step": 145
},
{
"epoch": 0.089179548156956,
"grad_norm": 2.8980920523843032,
"learning_rate": 8.875739644970414e-06,
"loss": 0.7204,
"step": 150
},
{
"epoch": 0.09215219976218787,
"grad_norm": 2.8127651120484893,
"learning_rate": 9.171597633136095e-06,
"loss": 0.7263,
"step": 155
},
{
"epoch": 0.09512485136741974,
"grad_norm": 3.099526975800466,
"learning_rate": 9.467455621301776e-06,
"loss": 0.7266,
"step": 160
},
{
"epoch": 0.0980975029726516,
"grad_norm": 2.847472407806089,
"learning_rate": 9.763313609467457e-06,
"loss": 0.7148,
"step": 165
},
{
"epoch": 0.10107015457788347,
"grad_norm": 3.1542218699949243,
"learning_rate": 9.999989221415223e-06,
"loss": 0.73,
"step": 170
},
{
"epoch": 0.10404280618311534,
"grad_norm": 2.7886695417326197,
"learning_rate": 9.999611975827441e-06,
"loss": 0.7205,
"step": 175
},
{
"epoch": 0.1070154577883472,
"grad_norm": 2.863907914649196,
"learning_rate": 9.998695847471015e-06,
"loss": 0.7291,
"step": 180
},
{
"epoch": 0.10998810939357907,
"grad_norm": 2.682699306725325,
"learning_rate": 9.997240935090764e-06,
"loss": 0.7179,
"step": 185
},
{
"epoch": 0.11296076099881094,
"grad_norm": 2.6714047020353893,
"learning_rate": 9.9952473955043e-06,
"loss": 0.7095,
"step": 190
},
{
"epoch": 0.1159334126040428,
"grad_norm": 2.919385127037625,
"learning_rate": 9.992715443585123e-06,
"loss": 0.7183,
"step": 195
},
{
"epoch": 0.11890606420927467,
"grad_norm": 2.758146983625847,
"learning_rate": 9.989645352239467e-06,
"loss": 0.7097,
"step": 200
},
{
"epoch": 0.12187871581450654,
"grad_norm": 2.478949177408348,
"learning_rate": 9.986037452376875e-06,
"loss": 0.7053,
"step": 205
},
{
"epoch": 0.1248513674197384,
"grad_norm": 2.7139657440292004,
"learning_rate": 9.98189213287454e-06,
"loss": 0.7141,
"step": 210
},
{
"epoch": 0.1278240190249703,
"grad_norm": 2.525450968321056,
"learning_rate": 9.977209840535388e-06,
"loss": 0.712,
"step": 215
},
{
"epoch": 0.13079667063020214,
"grad_norm": 2.5660210355660085,
"learning_rate": 9.971991080039912e-06,
"loss": 0.7029,
"step": 220
},
{
"epoch": 0.133769322235434,
"grad_norm": 2.719791305848057,
"learning_rate": 9.966236413891787e-06,
"loss": 0.6874,
"step": 225
},
{
"epoch": 0.1367419738406659,
"grad_norm": 2.7013784687029174,
"learning_rate": 9.959946462357237e-06,
"loss": 0.7053,
"step": 230
},
{
"epoch": 0.13971462544589774,
"grad_norm": 2.4163450862939273,
"learning_rate": 9.953121903398172e-06,
"loss": 0.69,
"step": 235
},
{
"epoch": 0.1426872770511296,
"grad_norm": 2.6651187768588884,
"learning_rate": 9.945763472599125e-06,
"loss": 0.6974,
"step": 240
},
{
"epoch": 0.1456599286563615,
"grad_norm": 2.61783925406702,
"learning_rate": 9.937871963087958e-06,
"loss": 0.6964,
"step": 245
},
{
"epoch": 0.14863258026159334,
"grad_norm": 2.6420632329467826,
"learning_rate": 9.929448225450375e-06,
"loss": 0.6814,
"step": 250
},
{
"epoch": 0.1516052318668252,
"grad_norm": 2.585841442008173,
"learning_rate": 9.920493167638256e-06,
"loss": 0.6904,
"step": 255
},
{
"epoch": 0.1545778834720571,
"grad_norm": 2.772149995259441,
"learning_rate": 9.911007754871773e-06,
"loss": 0.6998,
"step": 260
},
{
"epoch": 0.15755053507728894,
"grad_norm": 2.633310745015164,
"learning_rate": 9.900993009535365e-06,
"loss": 0.6936,
"step": 265
},
{
"epoch": 0.1605231866825208,
"grad_norm": 2.5624767016075016,
"learning_rate": 9.890450011067544e-06,
"loss": 0.6901,
"step": 270
},
{
"epoch": 0.1634958382877527,
"grad_norm": 2.4863789713476803,
"learning_rate": 9.879379895844537e-06,
"loss": 0.7005,
"step": 275
},
{
"epoch": 0.16646848989298454,
"grad_norm": 2.6081863153211535,
"learning_rate": 9.867783857057808e-06,
"loss": 0.6739,
"step": 280
},
{
"epoch": 0.1694411414982164,
"grad_norm": 2.5731120093742854,
"learning_rate": 9.855663144585456e-06,
"loss": 0.6788,
"step": 285
},
{
"epoch": 0.1724137931034483,
"grad_norm": 2.4603786025187375,
"learning_rate": 9.843019064857481e-06,
"loss": 0.682,
"step": 290
},
{
"epoch": 0.17538644470868014,
"grad_norm": 2.5203036804775083,
"learning_rate": 9.829852980714986e-06,
"loss": 0.6592,
"step": 295
},
{
"epoch": 0.178359096313912,
"grad_norm": 2.4982207442211846,
"learning_rate": 9.816166311263276e-06,
"loss": 0.6678,
"step": 300
},
{
"epoch": 0.1813317479191439,
"grad_norm": 2.7749883913033395,
"learning_rate": 9.801960531718898e-06,
"loss": 0.6906,
"step": 305
},
{
"epoch": 0.18430439952437574,
"grad_norm": 2.604444101827745,
"learning_rate": 9.787237173250641e-06,
"loss": 0.6591,
"step": 310
},
{
"epoch": 0.1872770511296076,
"grad_norm": 2.4391472456168812,
"learning_rate": 9.771997822814496e-06,
"loss": 0.6697,
"step": 315
},
{
"epoch": 0.1902497027348395,
"grad_norm": 2.4616817852361366,
"learning_rate": 9.756244122982608e-06,
"loss": 0.6734,
"step": 320
},
{
"epoch": 0.19322235434007135,
"grad_norm": 2.5774348327106984,
"learning_rate": 9.739977771766225e-06,
"loss": 0.6588,
"step": 325
},
{
"epoch": 0.1961950059453032,
"grad_norm": 2.5087234779825116,
"learning_rate": 9.723200522432683e-06,
"loss": 0.6664,
"step": 330
},
{
"epoch": 0.1991676575505351,
"grad_norm": 2.3989927240738607,
"learning_rate": 9.705914183316435e-06,
"loss": 0.6803,
"step": 335
},
{
"epoch": 0.20214030915576695,
"grad_norm": 2.401172984855569,
"learning_rate": 9.68812061762413e-06,
"loss": 0.6636,
"step": 340
},
{
"epoch": 0.2051129607609988,
"grad_norm": 2.4421036295734426,
"learning_rate": 9.669821743233794e-06,
"loss": 0.6389,
"step": 345
},
{
"epoch": 0.2080856123662307,
"grad_norm": 2.577840676624431,
"learning_rate": 9.65101953248811e-06,
"loss": 0.6586,
"step": 350
},
{
"epoch": 0.21105826397146255,
"grad_norm": 2.3940226323264917,
"learning_rate": 9.631716011981823e-06,
"loss": 0.6479,
"step": 355
},
{
"epoch": 0.2140309155766944,
"grad_norm": 2.5250690081911786,
"learning_rate": 9.611913262343321e-06,
"loss": 0.6506,
"step": 360
},
{
"epoch": 0.2170035671819263,
"grad_norm": 2.4264301363563545,
"learning_rate": 9.591613418010355e-06,
"loss": 0.6357,
"step": 365
},
{
"epoch": 0.21997621878715815,
"grad_norm": 2.751369457099525,
"learning_rate": 9.57081866699999e-06,
"loss": 0.6551,
"step": 370
},
{
"epoch": 0.22294887039239,
"grad_norm": 2.7219604193356672,
"learning_rate": 9.549531250672764e-06,
"loss": 0.6463,
"step": 375
},
{
"epoch": 0.2259215219976219,
"grad_norm": 2.45624731672107,
"learning_rate": 9.52775346349111e-06,
"loss": 0.646,
"step": 380
},
{
"epoch": 0.22889417360285375,
"grad_norm": 2.544975764712231,
"learning_rate": 9.505487652772038e-06,
"loss": 0.6354,
"step": 385
},
{
"epoch": 0.2318668252080856,
"grad_norm": 2.374830073694884,
"learning_rate": 9.482736218434144e-06,
"loss": 0.6502,
"step": 390
},
{
"epoch": 0.2348394768133175,
"grad_norm": 2.422663231385471,
"learning_rate": 9.459501612738915e-06,
"loss": 0.6502,
"step": 395
},
{
"epoch": 0.23781212841854935,
"grad_norm": 2.490293745714411,
"learning_rate": 9.435786340026434e-06,
"loss": 0.6303,
"step": 400
},
{
"epoch": 0.2407847800237812,
"grad_norm": 2.6066004181742124,
"learning_rate": 9.41159295644543e-06,
"loss": 0.6486,
"step": 405
},
{
"epoch": 0.2437574316290131,
"grad_norm": 2.5218752196441434,
"learning_rate": 9.38692406967778e-06,
"loss": 0.6209,
"step": 410
},
{
"epoch": 0.24673008323424495,
"grad_norm": 2.389380886868179,
"learning_rate": 9.36178233865743e-06,
"loss": 0.6168,
"step": 415
},
{
"epoch": 0.2497027348394768,
"grad_norm": 2.5150661226784794,
"learning_rate": 9.336170473283803e-06,
"loss": 0.6171,
"step": 420
},
{
"epoch": 0.25267538644470866,
"grad_norm": 2.435724631506405,
"learning_rate": 9.310091234129718e-06,
"loss": 0.6299,
"step": 425
},
{
"epoch": 0.2556480380499406,
"grad_norm": 2.394787101037759,
"learning_rate": 9.283547432143842e-06,
"loss": 0.6051,
"step": 430
},
{
"epoch": 0.25862068965517243,
"grad_norm": 2.3112769318054602,
"learning_rate": 9.256541928347701e-06,
"loss": 0.6332,
"step": 435
},
{
"epoch": 0.2615933412604043,
"grad_norm": 2.395884463823803,
"learning_rate": 9.229077633527317e-06,
"loss": 0.6363,
"step": 440
},
{
"epoch": 0.26456599286563615,
"grad_norm": 2.5353977000342245,
"learning_rate": 9.201157507919466e-06,
"loss": 0.6096,
"step": 445
},
{
"epoch": 0.267538644470868,
"grad_norm": 2.4062321379781957,
"learning_rate": 9.17278456089261e-06,
"loss": 0.6214,
"step": 450
},
{
"epoch": 0.27051129607609986,
"grad_norm": 2.3853947296491254,
"learning_rate": 9.143961850622523e-06,
"loss": 0.6357,
"step": 455
},
{
"epoch": 0.2734839476813318,
"grad_norm": 2.3411923936500587,
"learning_rate": 9.114692483762682e-06,
"loss": 0.6111,
"step": 460
},
{
"epoch": 0.27645659928656363,
"grad_norm": 2.805658532417448,
"learning_rate": 9.084979615109402e-06,
"loss": 0.6338,
"step": 465
},
{
"epoch": 0.2794292508917955,
"grad_norm": 2.519691039204996,
"learning_rate": 9.054826447261807e-06,
"loss": 0.616,
"step": 470
},
{
"epoch": 0.28240190249702735,
"grad_norm": 2.5502443998981295,
"learning_rate": 9.02423623027663e-06,
"loss": 0.6215,
"step": 475
},
{
"epoch": 0.2853745541022592,
"grad_norm": 2.3581542332305374,
"learning_rate": 8.993212261317911e-06,
"loss": 0.6143,
"step": 480
},
{
"epoch": 0.28834720570749106,
"grad_norm": 2.2185371096529267,
"learning_rate": 8.961757884301614e-06,
"loss": 0.6103,
"step": 485
},
{
"epoch": 0.291319857312723,
"grad_norm": 2.388161926408038,
"learning_rate": 8.929876489535196e-06,
"loss": 0.6165,
"step": 490
},
{
"epoch": 0.29429250891795483,
"grad_norm": 2.3962973629889466,
"learning_rate": 8.89757151335219e-06,
"loss": 0.6124,
"step": 495
},
{
"epoch": 0.2972651605231867,
"grad_norm": 2.3456101381645746,
"learning_rate": 8.864846437741817e-06,
"loss": 0.6168,
"step": 500
},
{
"epoch": 0.30023781212841855,
"grad_norm": 2.857739205060792,
"learning_rate": 8.831704789973677e-06,
"loss": 0.601,
"step": 505
},
{
"epoch": 0.3032104637336504,
"grad_norm": 2.404200391558541,
"learning_rate": 8.79815014221757e-06,
"loss": 0.6159,
"step": 510
},
{
"epoch": 0.30618311533888226,
"grad_norm": 2.5066882094814185,
"learning_rate": 8.764186111158468e-06,
"loss": 0.6107,
"step": 515
},
{
"epoch": 0.3091557669441142,
"grad_norm": 2.439281663409053,
"learning_rate": 8.729816357606683e-06,
"loss": 0.6082,
"step": 520
},
{
"epoch": 0.31212841854934603,
"grad_norm": 2.434841960288868,
"learning_rate": 8.695044586103297e-06,
"loss": 0.6085,
"step": 525
},
{
"epoch": 0.3151010701545779,
"grad_norm": 2.3045925871070394,
"learning_rate": 8.659874544520862e-06,
"loss": 0.5752,
"step": 530
},
{
"epoch": 0.31807372175980975,
"grad_norm": 2.515840298273436,
"learning_rate": 8.624310023659437e-06,
"loss": 0.5954,
"step": 535
},
{
"epoch": 0.3210463733650416,
"grad_norm": 2.514641603087097,
"learning_rate": 8.588354856838002e-06,
"loss": 0.5923,
"step": 540
},
{
"epoch": 0.32401902497027346,
"grad_norm": 2.353964195966498,
"learning_rate": 8.552012919481278e-06,
"loss": 0.5785,
"step": 545
},
{
"epoch": 0.3269916765755054,
"grad_norm": 2.3620191543194657,
"learning_rate": 8.515288128702015e-06,
"loss": 0.5942,
"step": 550
},
{
"epoch": 0.32996432818073723,
"grad_norm": 2.5008196020127964,
"learning_rate": 8.478184442878787e-06,
"loss": 0.6116,
"step": 555
},
{
"epoch": 0.3329369797859691,
"grad_norm": 2.3614970118631833,
"learning_rate": 8.440705861229344e-06,
"loss": 0.5838,
"step": 560
},
{
"epoch": 0.33590963139120095,
"grad_norm": 2.3019970993481813,
"learning_rate": 8.40285642337955e-06,
"loss": 0.5979,
"step": 565
},
{
"epoch": 0.3388822829964328,
"grad_norm": 2.437131942480355,
"learning_rate": 8.36464020892797e-06,
"loss": 0.5583,
"step": 570
},
{
"epoch": 0.34185493460166466,
"grad_norm": 2.326966178552118,
"learning_rate": 8.326061337006161e-06,
"loss": 0.5877,
"step": 575
},
{
"epoch": 0.3448275862068966,
"grad_norm": 2.5231196817030455,
"learning_rate": 8.287123965834682e-06,
"loss": 0.5774,
"step": 580
},
{
"epoch": 0.34780023781212843,
"grad_norm": 2.445974147547591,
"learning_rate": 8.247832292274909e-06,
"loss": 0.5957,
"step": 585
},
{
"epoch": 0.3507728894173603,
"grad_norm": 2.30995967302817,
"learning_rate": 8.208190551376668e-06,
"loss": 0.5913,
"step": 590
},
{
"epoch": 0.35374554102259215,
"grad_norm": 2.304615444057916,
"learning_rate": 8.16820301592176e-06,
"loss": 0.5951,
"step": 595
},
{
"epoch": 0.356718192627824,
"grad_norm": 2.2337712440097466,
"learning_rate": 8.127873995963439e-06,
"loss": 0.5717,
"step": 600
},
{
"epoch": 0.35969084423305586,
"grad_norm": 2.352192550598562,
"learning_rate": 8.087207838361819e-06,
"loss": 0.5559,
"step": 605
},
{
"epoch": 0.3626634958382878,
"grad_norm": 2.4383288854885086,
"learning_rate": 8.046208926315381e-06,
"loss": 0.5621,
"step": 610
},
{
"epoch": 0.36563614744351963,
"grad_norm": 2.4669126509592982,
"learning_rate": 8.004881678888518e-06,
"loss": 0.5822,
"step": 615
},
{
"epoch": 0.3686087990487515,
"grad_norm": 2.350359506686674,
"learning_rate": 7.963230550535212e-06,
"loss": 0.5726,
"step": 620
},
{
"epoch": 0.37158145065398335,
"grad_norm": 2.3573597155480965,
"learning_rate": 7.921260030618935e-06,
"loss": 0.5615,
"step": 625
},
{
"epoch": 0.3745541022592152,
"grad_norm": 2.768754472930866,
"learning_rate": 7.878974642928752e-06,
"loss": 0.566,
"step": 630
},
{
"epoch": 0.37752675386444706,
"grad_norm": 2.460878358484002,
"learning_rate": 7.836378945191718e-06,
"loss": 0.5534,
"step": 635
},
{
"epoch": 0.380499405469679,
"grad_norm": 2.280914113206368,
"learning_rate": 7.793477528581642e-06,
"loss": 0.5655,
"step": 640
},
{
"epoch": 0.38347205707491083,
"grad_norm": 2.481778359904293,
"learning_rate": 7.750275017224208e-06,
"loss": 0.5676,
"step": 645
},
{
"epoch": 0.3864447086801427,
"grad_norm": 2.616616432524906,
"learning_rate": 7.706776067698574e-06,
"loss": 0.564,
"step": 650
},
{
"epoch": 0.38941736028537455,
"grad_norm": 2.371851297853059,
"learning_rate": 7.662985368535465e-06,
"loss": 0.5633,
"step": 655
},
{
"epoch": 0.3923900118906064,
"grad_norm": 2.3788259328454413,
"learning_rate": 7.618907639711816e-06,
"loss": 0.5478,
"step": 660
},
{
"epoch": 0.39536266349583826,
"grad_norm": 2.329685605293642,
"learning_rate": 7.574547632142029e-06,
"loss": 0.5496,
"step": 665
},
{
"epoch": 0.3983353151010702,
"grad_norm": 2.3986113985448867,
"learning_rate": 7.529910127165897e-06,
"loss": 0.5488,
"step": 670
},
{
"epoch": 0.40130796670630203,
"grad_norm": 2.314354656341197,
"learning_rate": 7.48499993603325e-06,
"loss": 0.5564,
"step": 675
},
{
"epoch": 0.4042806183115339,
"grad_norm": 2.301009486833614,
"learning_rate": 7.4398218993853765e-06,
"loss": 0.5579,
"step": 680
},
{
"epoch": 0.40725326991676575,
"grad_norm": 2.529267389965807,
"learning_rate": 7.394380886733263e-06,
"loss": 0.5418,
"step": 685
},
{
"epoch": 0.4102259215219976,
"grad_norm": 2.273496393101965,
"learning_rate": 7.3486817959327524e-06,
"loss": 0.5486,
"step": 690
},
{
"epoch": 0.41319857312722946,
"grad_norm": 2.362055285648788,
"learning_rate": 7.302729552656613e-06,
"loss": 0.5473,
"step": 695
},
{
"epoch": 0.4161712247324614,
"grad_norm": 2.568398004847708,
"learning_rate": 7.256529109863637e-06,
"loss": 0.5635,
"step": 700
},
{
"epoch": 0.41914387633769323,
"grad_norm": 2.334007582897662,
"learning_rate": 7.210085447264777e-06,
"loss": 0.5569,
"step": 705
},
{
"epoch": 0.4221165279429251,
"grad_norm": 2.27944109416587,
"learning_rate": 7.163403570786416e-06,
"loss": 0.5314,
"step": 710
},
{
"epoch": 0.42508917954815695,
"grad_norm": 2.439475832033061,
"learning_rate": 7.116488512030798e-06,
"loss": 0.5511,
"step": 715
},
{
"epoch": 0.4280618311533888,
"grad_norm": 2.3531415117791354,
"learning_rate": 7.069345327733701e-06,
"loss": 0.5457,
"step": 720
},
{
"epoch": 0.43103448275862066,
"grad_norm": 2.3295198969589035,
"learning_rate": 7.021979099219396e-06,
"loss": 0.5239,
"step": 725
},
{
"epoch": 0.4340071343638526,
"grad_norm": 2.5063503569942385,
"learning_rate": 6.974394931852957e-06,
"loss": 0.5384,
"step": 730
},
{
"epoch": 0.43697978596908443,
"grad_norm": 2.355478191724581,
"learning_rate": 6.926597954489979e-06,
"loss": 0.5098,
"step": 735
},
{
"epoch": 0.4399524375743163,
"grad_norm": 2.4091145093686372,
"learning_rate": 6.878593318923763e-06,
"loss": 0.5146,
"step": 740
},
{
"epoch": 0.44292508917954815,
"grad_norm": 2.3684497417326913,
"learning_rate": 6.830386199330036e-06,
"loss": 0.5146,
"step": 745
},
{
"epoch": 0.44589774078478,
"grad_norm": 2.4513900163292113,
"learning_rate": 6.781981791709247e-06,
"loss": 0.5152,
"step": 750
},
{
"epoch": 0.44887039239001186,
"grad_norm": 2.453629962173857,
"learning_rate": 6.73338531332652e-06,
"loss": 0.536,
"step": 755
},
{
"epoch": 0.4518430439952438,
"grad_norm": 2.474092048043491,
"learning_rate": 6.6846020021493105e-06,
"loss": 0.5236,
"step": 760
},
{
"epoch": 0.45481569560047563,
"grad_norm": 2.4532902284126523,
"learning_rate": 6.63563711628283e-06,
"loss": 0.518,
"step": 765
},
{
"epoch": 0.4577883472057075,
"grad_norm": 2.4470168591075163,
"learning_rate": 6.586495933403306e-06,
"loss": 0.5192,
"step": 770
},
{
"epoch": 0.46076099881093935,
"grad_norm": 2.483107107350786,
"learning_rate": 6.53718375018912e-06,
"loss": 0.5217,
"step": 775
},
{
"epoch": 0.4637336504161712,
"grad_norm": 2.433139401600069,
"learning_rate": 6.4877058817499174e-06,
"loss": 0.4948,
"step": 780
},
{
"epoch": 0.4667063020214031,
"grad_norm": 2.3018625992746684,
"learning_rate": 6.4380676610537075e-06,
"loss": 0.5156,
"step": 785
},
{
"epoch": 0.469678953626635,
"grad_norm": 2.425100167363479,
"learning_rate": 6.388274438352053e-06,
"loss": 0.5186,
"step": 790
},
{
"epoch": 0.47265160523186683,
"grad_norm": 2.4243934526405493,
"learning_rate": 6.3383315806034014e-06,
"loss": 0.5123,
"step": 795
},
{
"epoch": 0.4756242568370987,
"grad_norm": 2.3005885180264545,
"learning_rate": 6.288244470894596e-06,
"loss": 0.5147,
"step": 800
},
{
"epoch": 0.47859690844233055,
"grad_norm": 2.461720715630345,
"learning_rate": 6.238018507860667e-06,
"loss": 0.5192,
"step": 805
},
{
"epoch": 0.4815695600475624,
"grad_norm": 2.391513806976481,
"learning_rate": 6.187659105102941e-06,
"loss": 0.5114,
"step": 810
},
{
"epoch": 0.4845422116527943,
"grad_norm": 2.2043491836548363,
"learning_rate": 6.1371716906055336e-06,
"loss": 0.5277,
"step": 815
},
{
"epoch": 0.4875148632580262,
"grad_norm": 2.4124196938652074,
"learning_rate": 6.086561706150292e-06,
"loss": 0.5271,
"step": 820
},
{
"epoch": 0.49048751486325803,
"grad_norm": 2.365747909189894,
"learning_rate": 6.035834606730258e-06,
"loss": 0.4949,
"step": 825
},
{
"epoch": 0.4934601664684899,
"grad_norm": 2.4935417512792273,
"learning_rate": 5.984995859961707e-06,
"loss": 0.4807,
"step": 830
},
{
"epoch": 0.49643281807372175,
"grad_norm": 2.23752471394695,
"learning_rate": 5.934050945494803e-06,
"loss": 0.5053,
"step": 835
},
{
"epoch": 0.4994054696789536,
"grad_norm": 2.3920061376574187,
"learning_rate": 5.883005354422995e-06,
"loss": 0.5087,
"step": 840
},
{
"epoch": 0.5023781212841855,
"grad_norm": 2.35431470302546,
"learning_rate": 5.83186458869115e-06,
"loss": 0.51,
"step": 845
},
{
"epoch": 0.5053507728894173,
"grad_norm": 2.3257622845834653,
"learning_rate": 5.7806341605025264e-06,
"loss": 0.5092,
"step": 850
},
{
"epoch": 0.5083234244946492,
"grad_norm": 2.236833604753037,
"learning_rate": 5.7293195917246426e-06,
"loss": 0.5014,
"step": 855
},
{
"epoch": 0.5112960760998811,
"grad_norm": 2.408857775213423,
"learning_rate": 5.67792641329411e-06,
"loss": 0.4932,
"step": 860
},
{
"epoch": 0.514268727705113,
"grad_norm": 2.4984651682967782,
"learning_rate": 5.6264601646204645e-06,
"loss": 0.5,
"step": 865
},
{
"epoch": 0.5172413793103449,
"grad_norm": 2.6589983137124964,
"learning_rate": 5.574926392989118e-06,
"loss": 0.4953,
"step": 870
},
{
"epoch": 0.5202140309155767,
"grad_norm": 2.2920034526589474,
"learning_rate": 5.523330652963443e-06,
"loss": 0.4918,
"step": 875
},
{
"epoch": 0.5231866825208086,
"grad_norm": 2.4555058214366414,
"learning_rate": 5.471678505786063e-06,
"loss": 0.5018,
"step": 880
},
{
"epoch": 0.5261593341260404,
"grad_norm": 2.5114137922653446,
"learning_rate": 5.419975518779443e-06,
"loss": 0.4941,
"step": 885
},
{
"epoch": 0.5291319857312723,
"grad_norm": 2.271061287454104,
"learning_rate": 5.36822726474582e-06,
"loss": 0.4931,
"step": 890
},
{
"epoch": 0.5321046373365041,
"grad_norm": 2.381608236846904,
"learning_rate": 5.3164393213665275e-06,
"loss": 0.4785,
"step": 895
},
{
"epoch": 0.535077288941736,
"grad_norm": 2.3066959393356106,
"learning_rate": 5.2646172706008154e-06,
"loss": 0.49,
"step": 900
},
{
"epoch": 0.5380499405469679,
"grad_norm": 2.357712136319602,
"learning_rate": 5.212766698084196e-06,
"loss": 0.5027,
"step": 905
},
{
"epoch": 0.5410225921521997,
"grad_norm": 2.5267610099653406,
"learning_rate": 5.160893192526395e-06,
"loss": 0.4777,
"step": 910
},
{
"epoch": 0.5439952437574316,
"grad_norm": 2.359927663823654,
"learning_rate": 5.109002345108971e-06,
"loss": 0.4937,
"step": 915
},
{
"epoch": 0.5469678953626635,
"grad_norm": 2.41091168049318,
"learning_rate": 5.0570997488826775e-06,
"loss": 0.4757,
"step": 920
},
{
"epoch": 0.5499405469678954,
"grad_norm": 2.2779085550054305,
"learning_rate": 5.005190998164614e-06,
"loss": 0.4951,
"step": 925
},
{
"epoch": 0.5529131985731273,
"grad_norm": 2.2695911460870417,
"learning_rate": 4.953281687935236e-06,
"loss": 0.4712,
"step": 930
},
{
"epoch": 0.5558858501783591,
"grad_norm": 2.3975975966184198,
"learning_rate": 4.901377413235312e-06,
"loss": 0.4808,
"step": 935
},
{
"epoch": 0.558858501783591,
"grad_norm": 2.3802468272374253,
"learning_rate": 4.849483768562849e-06,
"loss": 0.4611,
"step": 940
},
{
"epoch": 0.5618311533888228,
"grad_norm": 2.324549924901845,
"learning_rate": 4.797606347270104e-06,
"loss": 0.4606,
"step": 945
},
{
"epoch": 0.5648038049940547,
"grad_norm": 2.8248050417928727,
"learning_rate": 4.7457507409606954e-06,
"loss": 0.4586,
"step": 950
},
{
"epoch": 0.5677764565992865,
"grad_norm": 2.3408008419334867,
"learning_rate": 4.693922538886914e-06,
"loss": 0.4673,
"step": 955
},
{
"epoch": 0.5707491082045184,
"grad_norm": 2.455876677604174,
"learning_rate": 4.642127327347292e-06,
"loss": 0.4803,
"step": 960
},
{
"epoch": 0.5737217598097503,
"grad_norm": 2.4097659884184717,
"learning_rate": 4.590370689084474e-06,
"loss": 0.4634,
"step": 965
},
{
"epoch": 0.5766944114149821,
"grad_norm": 2.5668401837693033,
"learning_rate": 4.53865820268349e-06,
"loss": 0.4598,
"step": 970
},
{
"epoch": 0.579667063020214,
"grad_norm": 2.2667882653983566,
"learning_rate": 4.486995441970468e-06,
"loss": 0.4606,
"step": 975
},
{
"epoch": 0.582639714625446,
"grad_norm": 2.3952069585748728,
"learning_rate": 4.435387975411849e-06,
"loss": 0.4554,
"step": 980
},
{
"epoch": 0.5856123662306778,
"grad_norm": 2.280919958503882,
"learning_rate": 4.383841365514208e-06,
"loss": 0.4765,
"step": 985
},
{
"epoch": 0.5885850178359097,
"grad_norm": 2.342556124656158,
"learning_rate": 4.332361168224678e-06,
"loss": 0.4613,
"step": 990
},
{
"epoch": 0.5915576694411415,
"grad_norm": 2.2952593312149405,
"learning_rate": 4.280952932332128e-06,
"loss": 0.446,
"step": 995
},
{
"epoch": 0.5945303210463734,
"grad_norm": 2.3321748153973183,
"learning_rate": 4.229622198869063e-06,
"loss": 0.452,
"step": 1000
},
{
"epoch": 0.5975029726516052,
"grad_norm": 2.4823844105009423,
"learning_rate": 4.178374500514397e-06,
"loss": 0.449,
"step": 1005
},
{
"epoch": 0.6004756242568371,
"grad_norm": 2.2950369831862134,
"learning_rate": 4.127215360997117e-06,
"loss": 0.4549,
"step": 1010
},
{
"epoch": 0.603448275862069,
"grad_norm": 2.5166575033757477,
"learning_rate": 4.076150294500897e-06,
"loss": 0.4519,
"step": 1015
},
{
"epoch": 0.6064209274673008,
"grad_norm": 2.3807622926087664,
"learning_rate": 4.0251848050697615e-06,
"loss": 0.4559,
"step": 1020
},
{
"epoch": 0.6093935790725327,
"grad_norm": 2.351343124866575,
"learning_rate": 3.974324386014828e-06,
"loss": 0.4562,
"step": 1025
},
{
"epoch": 0.6123662306777645,
"grad_norm": 2.2523827712367988,
"learning_rate": 3.92357451932221e-06,
"loss": 0.4453,
"step": 1030
},
{
"epoch": 0.6153388822829964,
"grad_norm": 2.30496980280335,
"learning_rate": 3.8729406750621486e-06,
"loss": 0.4571,
"step": 1035
},
{
"epoch": 0.6183115338882283,
"grad_norm": 2.27622501534253,
"learning_rate": 3.82242831079941e-06,
"loss": 0.4337,
"step": 1040
},
{
"epoch": 0.6212841854934602,
"grad_norm": 2.2971541253075114,
"learning_rate": 3.772042871005057e-06,
"loss": 0.4417,
"step": 1045
},
{
"epoch": 0.6242568370986921,
"grad_norm": 2.295568197082708,
"learning_rate": 3.721789786469602e-06,
"loss": 0.4482,
"step": 1050
},
{
"epoch": 0.6272294887039239,
"grad_norm": 2.418242134420845,
"learning_rate": 3.6716744737176567e-06,
"loss": 0.4452,
"step": 1055
},
{
"epoch": 0.6302021403091558,
"grad_norm": 2.258447637407748,
"learning_rate": 3.6217023344241198e-06,
"loss": 0.458,
"step": 1060
},
{
"epoch": 0.6331747919143876,
"grad_norm": 2.3383623329057732,
"learning_rate": 3.57187875483194e-06,
"loss": 0.4484,
"step": 1065
},
{
"epoch": 0.6361474435196195,
"grad_norm": 2.2709297436937885,
"learning_rate": 3.5222091051715803e-06,
"loss": 0.4385,
"step": 1070
},
{
"epoch": 0.6391200951248514,
"grad_norm": 2.3001869154507357,
"learning_rate": 3.472698739082172e-06,
"loss": 0.4409,
"step": 1075
},
{
"epoch": 0.6420927467300832,
"grad_norm": 2.2279803463182395,
"learning_rate": 3.423352993034482e-06,
"loss": 0.428,
"step": 1080
},
{
"epoch": 0.6450653983353151,
"grad_norm": 2.1572728551701723,
"learning_rate": 3.374177185755721e-06,
"loss": 0.4195,
"step": 1085
},
{
"epoch": 0.6480380499405469,
"grad_norm": 2.30995305892372,
"learning_rate": 3.3251766176562605e-06,
"loss": 0.4387,
"step": 1090
},
{
"epoch": 0.6510107015457788,
"grad_norm": 2.363018968058027,
"learning_rate": 3.2763565702583322e-06,
"loss": 0.428,
"step": 1095
},
{
"epoch": 0.6539833531510107,
"grad_norm": 2.3825197647163137,
"learning_rate": 3.227722305626761e-06,
"loss": 0.4299,
"step": 1100
},
{
"epoch": 0.6569560047562426,
"grad_norm": 2.16179977498919,
"learning_rate": 3.1792790658017874e-06,
"loss": 0.4327,
"step": 1105
},
{
"epoch": 0.6599286563614745,
"grad_norm": 2.175753238496728,
"learning_rate": 3.1310320722340636e-06,
"loss": 0.4204,
"step": 1110
},
{
"epoch": 0.6629013079667063,
"grad_norm": 2.222441260772541,
"learning_rate": 3.082986525221851e-06,
"loss": 0.4316,
"step": 1115
},
{
"epoch": 0.6658739595719382,
"grad_norm": 2.2178029194036415,
"learning_rate": 3.035147603350511e-06,
"loss": 0.4231,
"step": 1120
},
{
"epoch": 0.66884661117717,
"grad_norm": 2.3519506089063786,
"learning_rate": 2.9875204629343333e-06,
"loss": 0.4231,
"step": 1125
},
{
"epoch": 0.6718192627824019,
"grad_norm": 2.248473095466383,
"learning_rate": 2.940110237460758e-06,
"loss": 0.4166,
"step": 1130
},
{
"epoch": 0.6747919143876338,
"grad_norm": 2.3278826893891793,
"learning_rate": 2.8929220370370705e-06,
"loss": 0.4184,
"step": 1135
},
{
"epoch": 0.6777645659928656,
"grad_norm": 2.2527103466010794,
"learning_rate": 2.845960947839598e-06,
"loss": 0.4253,
"step": 1140
},
{
"epoch": 0.6807372175980975,
"grad_norm": 2.3636767593489836,
"learning_rate": 2.7992320315655043e-06,
"loss": 0.4294,
"step": 1145
},
{
"epoch": 0.6837098692033293,
"grad_norm": 2.2384821444252543,
"learning_rate": 2.752740324887219e-06,
"loss": 0.4164,
"step": 1150
},
{
"epoch": 0.6866825208085613,
"grad_norm": 2.3602120268768783,
"learning_rate": 2.706490838909547e-06,
"loss": 0.4244,
"step": 1155
},
{
"epoch": 0.6896551724137931,
"grad_norm": 2.474112669222775,
"learning_rate": 2.6604885586295636e-06,
"loss": 0.4137,
"step": 1160
},
{
"epoch": 0.692627824019025,
"grad_norm": 2.2552175118807605,
"learning_rate": 2.6147384423992893e-06,
"loss": 0.428,
"step": 1165
},
{
"epoch": 0.6956004756242569,
"grad_norm": 2.4189395286363258,
"learning_rate": 2.5692454213912655e-06,
"loss": 0.4244,
"step": 1170
},
{
"epoch": 0.6985731272294887,
"grad_norm": 2.240213088622091,
"learning_rate": 2.5240143990670513e-06,
"loss": 0.4117,
"step": 1175
},
{
"epoch": 0.7015457788347206,
"grad_norm": 2.2726272120032283,
"learning_rate": 2.479050250648692e-06,
"loss": 0.4296,
"step": 1180
},
{
"epoch": 0.7045184304399524,
"grad_norm": 2.1850341107989975,
"learning_rate": 2.4343578225932573e-06,
"loss": 0.4132,
"step": 1185
},
{
"epoch": 0.7074910820451843,
"grad_norm": 2.272873233207882,
"learning_rate": 2.389941932070455e-06,
"loss": 0.3981,
"step": 1190
},
{
"epoch": 0.7104637336504162,
"grad_norm": 2.252150495601297,
"learning_rate": 2.345807366443419e-06,
"loss": 0.4115,
"step": 1195
},
{
"epoch": 0.713436385255648,
"grad_norm": 2.266551085513327,
"learning_rate": 2.3019588827527017e-06,
"loss": 0.4086,
"step": 1200
},
{
"epoch": 0.7164090368608799,
"grad_norm": 2.332350830052582,
"learning_rate": 2.2584012072035366e-06,
"loss": 0.4089,
"step": 1205
},
{
"epoch": 0.7193816884661117,
"grad_norm": 2.2344702253310715,
"learning_rate": 2.21513903465643e-06,
"loss": 0.3998,
"step": 1210
},
{
"epoch": 0.7223543400713437,
"grad_norm": 2.3267615219996935,
"learning_rate": 2.1721770281211162e-06,
"loss": 0.4039,
"step": 1215
},
{
"epoch": 0.7253269916765755,
"grad_norm": 2.2320045041894945,
"learning_rate": 2.1295198182539627e-06,
"loss": 0.4106,
"step": 1220
},
{
"epoch": 0.7282996432818074,
"grad_norm": 2.3252249478957574,
"learning_rate": 2.0871720028588516e-06,
"loss": 0.4067,
"step": 1225
},
{
"epoch": 0.7312722948870393,
"grad_norm": 2.2267058392084604,
"learning_rate": 2.045138146391607e-06,
"loss": 0.4086,
"step": 1230
},
{
"epoch": 0.7342449464922711,
"grad_norm": 2.411234372389695,
"learning_rate": 2.0034227794680195e-06,
"loss": 0.3969,
"step": 1235
},
{
"epoch": 0.737217598097503,
"grad_norm": 2.1311399653064518,
"learning_rate": 1.962030398375506e-06,
"loss": 0.4165,
"step": 1240
},
{
"epoch": 0.7401902497027348,
"grad_norm": 2.3782226385738836,
"learning_rate": 1.920965464588481e-06,
"loss": 0.3987,
"step": 1245
},
{
"epoch": 0.7431629013079667,
"grad_norm": 2.3739915931205076,
"learning_rate": 1.8802324042874847e-06,
"loss": 0.3995,
"step": 1250
},
{
"epoch": 0.7461355529131986,
"grad_norm": 2.3894925514110965,
"learning_rate": 1.8398356078820934e-06,
"loss": 0.3906,
"step": 1255
},
{
"epoch": 0.7491082045184304,
"grad_norm": 2.3093439961839657,
"learning_rate": 1.7997794295377098e-06,
"loss": 0.3917,
"step": 1260
},
{
"epoch": 0.7520808561236623,
"grad_norm": 2.319382769614651,
"learning_rate": 1.7600681867062469e-06,
"loss": 0.3885,
"step": 1265
},
{
"epoch": 0.7550535077288941,
"grad_norm": 2.2803304350831413,
"learning_rate": 1.7207061596607694e-06,
"loss": 0.3927,
"step": 1270
},
{
"epoch": 0.7580261593341261,
"grad_norm": 2.3220946916005567,
"learning_rate": 1.6816975910341526e-06,
"loss": 0.3962,
"step": 1275
},
{
"epoch": 0.760998810939358,
"grad_norm": 2.264317075137385,
"learning_rate": 1.6430466853617788e-06,
"loss": 0.3978,
"step": 1280
},
{
"epoch": 0.7639714625445898,
"grad_norm": 2.1705446882611654,
"learning_rate": 1.6047576086283613e-06,
"loss": 0.3912,
"step": 1285
},
{
"epoch": 0.7669441141498217,
"grad_norm": 2.291864149582287,
"learning_rate": 1.566834487818909e-06,
"loss": 0.3997,
"step": 1290
},
{
"epoch": 0.7699167657550535,
"grad_norm": 2.2779523408752276,
"learning_rate": 1.5292814104739012e-06,
"loss": 0.4015,
"step": 1295
},
{
"epoch": 0.7728894173602854,
"grad_norm": 2.2360566853344532,
"learning_rate": 1.492102424248717e-06,
"loss": 0.3936,
"step": 1300
},
{
"epoch": 0.7758620689655172,
"grad_norm": 2.3193326093728395,
"learning_rate": 1.455301536477348e-06,
"loss": 0.4006,
"step": 1305
},
{
"epoch": 0.7788347205707491,
"grad_norm": 2.241071015791803,
"learning_rate": 1.4188827137404814e-06,
"loss": 0.3953,
"step": 1310
},
{
"epoch": 0.781807372175981,
"grad_norm": 2.3345320959262397,
"learning_rate": 1.3828498814379526e-06,
"loss": 0.3827,
"step": 1315
},
{
"epoch": 0.7847800237812128,
"grad_norm": 2.255050340004342,
"learning_rate": 1.3472069233656455e-06,
"loss": 0.395,
"step": 1320
},
{
"epoch": 0.7877526753864447,
"grad_norm": 2.182203091512302,
"learning_rate": 1.3119576812968893e-06,
"loss": 0.3889,
"step": 1325
},
{
"epoch": 0.7907253269916765,
"grad_norm": 2.266179435014493,
"learning_rate": 1.2771059545683612e-06,
"loss": 0.3974,
"step": 1330
},
{
"epoch": 0.7936979785969085,
"grad_norm": 2.3267812132219663,
"learning_rate": 1.242655499670583e-06,
"loss": 0.3827,
"step": 1335
},
{
"epoch": 0.7966706302021404,
"grad_norm": 2.1058933825314736,
"learning_rate": 1.2086100298430225e-06,
"loss": 0.3833,
"step": 1340
},
{
"epoch": 0.7996432818073722,
"grad_norm": 2.1268029483679514,
"learning_rate": 1.1749732146738651e-06,
"loss": 0.3866,
"step": 1345
},
{
"epoch": 0.8026159334126041,
"grad_norm": 2.1738354617676854,
"learning_rate": 1.1417486797044886e-06,
"loss": 0.3791,
"step": 1350
},
{
"epoch": 0.8055885850178359,
"grad_norm": 2.160878200122144,
"learning_rate": 1.1089400060386801e-06,
"loss": 0.3755,
"step": 1355
},
{
"epoch": 0.8085612366230678,
"grad_norm": 2.129626397999257,
"learning_rate": 1.0765507299566552e-06,
"loss": 0.372,
"step": 1360
},
{
"epoch": 0.8115338882282996,
"grad_norm": 2.3501282748597947,
"learning_rate": 1.0445843425338902e-06,
"loss": 0.3856,
"step": 1365
},
{
"epoch": 0.8145065398335315,
"grad_norm": 2.2841395141791883,
"learning_rate": 1.0130442892648434e-06,
"loss": 0.383,
"step": 1370
},
{
"epoch": 0.8174791914387634,
"grad_norm": 2.2929912740203333,
"learning_rate": 9.819339696915848e-07,
"loss": 0.3826,
"step": 1375
},
{
"epoch": 0.8204518430439952,
"grad_norm": 2.0935559084604436,
"learning_rate": 9.512567370373643e-07,
"loss": 0.3846,
"step": 1380
},
{
"epoch": 0.8234244946492271,
"grad_norm": 2.2050607660086574,
"learning_rate": 9.210158978452033e-07,
"loss": 0.3831,
"step": 1385
},
{
"epoch": 0.8263971462544589,
"grad_norm": 2.365214622091771,
"learning_rate": 8.912147116214842e-07,
"loss": 0.3766,
"step": 1390
},
{
"epoch": 0.8293697978596909,
"grad_norm": 2.2601705624692547,
"learning_rate": 8.618563904846294e-07,
"loss": 0.3804,
"step": 1395
},
{
"epoch": 0.8323424494649228,
"grad_norm": 2.4159041452669117,
"learning_rate": 8.329440988188886e-07,
"loss": 0.3924,
"step": 1400
},
{
"epoch": 0.8353151010701546,
"grad_norm": 2.1795173707942643,
"learning_rate": 8.04480952933257e-07,
"loss": 0.3754,
"step": 1405
},
{
"epoch": 0.8382877526753865,
"grad_norm": 2.2351438825547896,
"learning_rate": 7.764700207255904e-07,
"loss": 0.3897,
"step": 1410
},
{
"epoch": 0.8412604042806183,
"grad_norm": 2.20058726362207,
"learning_rate": 7.489143213519301e-07,
"loss": 0.3775,
"step": 1415
},
{
"epoch": 0.8442330558858502,
"grad_norm": 2.3495313166309213,
"learning_rate": 7.218168249010821e-07,
"loss": 0.3795,
"step": 1420
},
{
"epoch": 0.847205707491082,
"grad_norm": 2.068114427890453,
"learning_rate": 6.951804520744915e-07,
"loss": 0.3754,
"step": 1425
},
{
"epoch": 0.8501783590963139,
"grad_norm": 2.3055094145170445,
"learning_rate": 6.690080738714267e-07,
"loss": 0.3871,
"step": 1430
},
{
"epoch": 0.8531510107015458,
"grad_norm": 2.1607153035130953,
"learning_rate": 6.433025112795383e-07,
"loss": 0.3649,
"step": 1435
},
{
"epoch": 0.8561236623067776,
"grad_norm": 2.3452133211542847,
"learning_rate": 6.180665349707892e-07,
"loss": 0.3789,
"step": 1440
},
{
"epoch": 0.8590963139120095,
"grad_norm": 2.270134523374029,
"learning_rate": 5.93302865002825e-07,
"loss": 0.3774,
"step": 1445
},
{
"epoch": 0.8620689655172413,
"grad_norm": 2.188053068739294,
"learning_rate": 5.69014170525794e-07,
"loss": 0.3769,
"step": 1450
},
{
"epoch": 0.8650416171224733,
"grad_norm": 2.1512178822513772,
"learning_rate": 5.452030694946448e-07,
"loss": 0.3766,
"step": 1455
},
{
"epoch": 0.8680142687277052,
"grad_norm": 2.5248216489482314,
"learning_rate": 5.21872128386961e-07,
"loss": 0.3685,
"step": 1460
},
{
"epoch": 0.870986920332937,
"grad_norm": 2.1936261850407455,
"learning_rate": 4.990238619263232e-07,
"loss": 0.3755,
"step": 1465
},
{
"epoch": 0.8739595719381689,
"grad_norm": 2.0674457126577956,
"learning_rate": 4.7666073281126713e-07,
"loss": 0.37,
"step": 1470
},
{
"epoch": 0.8769322235434007,
"grad_norm": 2.4393074814620705,
"learning_rate": 4.547851514498386e-07,
"loss": 0.3743,
"step": 1475
},
{
"epoch": 0.8799048751486326,
"grad_norm": 2.2218679471161984,
"learning_rate": 4.333994756997878e-07,
"loss": 0.3737,
"step": 1480
},
{
"epoch": 0.8828775267538644,
"grad_norm": 2.326578195058314,
"learning_rate": 4.125060106144313e-07,
"loss": 0.3706,
"step": 1485
},
{
"epoch": 0.8858501783590963,
"grad_norm": 2.159799023836089,
"learning_rate": 3.921070081941969e-07,
"loss": 0.3757,
"step": 1490
},
{
"epoch": 0.8888228299643282,
"grad_norm": 2.0971056501716543,
"learning_rate": 3.72204667143895e-07,
"loss": 0.3662,
"step": 1495
},
{
"epoch": 0.89179548156956,
"grad_norm": 2.2898930751852817,
"learning_rate": 3.528011326357306e-07,
"loss": 0.3803,
"step": 1500
},
{
"epoch": 0.8947681331747919,
"grad_norm": 2.276170558283118,
"learning_rate": 3.338984960780856e-07,
"loss": 0.3696,
"step": 1505
},
{
"epoch": 0.8977407847800237,
"grad_norm": 2.151976309053238,
"learning_rate": 3.154987948901006e-07,
"loss": 0.3856,
"step": 1510
},
{
"epoch": 0.9007134363852557,
"grad_norm": 2.168451215561875,
"learning_rate": 2.9760401228206394e-07,
"loss": 0.3773,
"step": 1515
},
{
"epoch": 0.9036860879904876,
"grad_norm": 2.2792459501817155,
"learning_rate": 2.802160770416584e-07,
"loss": 0.3707,
"step": 1520
},
{
"epoch": 0.9066587395957194,
"grad_norm": 2.4152961249318436,
"learning_rate": 2.633368633260658e-07,
"loss": 0.3664,
"step": 1525
},
{
"epoch": 0.9096313912009513,
"grad_norm": 2.2767769677463066,
"learning_rate": 2.469681904599569e-07,
"loss": 0.3786,
"step": 1530
},
{
"epoch": 0.9126040428061831,
"grad_norm": 2.3341884568313795,
"learning_rate": 2.3111182273940103e-07,
"loss": 0.366,
"step": 1535
},
{
"epoch": 0.915576694411415,
"grad_norm": 2.3090738993253064,
"learning_rate": 2.1576946924169785e-07,
"loss": 0.3682,
"step": 1540
},
{
"epoch": 0.9185493460166468,
"grad_norm": 2.2708470444789968,
"learning_rate": 2.009427836411665e-07,
"loss": 0.367,
"step": 1545
},
{
"epoch": 0.9215219976218787,
"grad_norm": 2.0757645629730543,
"learning_rate": 1.8663336403090482e-07,
"loss": 0.3664,
"step": 1550
},
{
"epoch": 0.9244946492271106,
"grad_norm": 2.424017703704812,
"learning_rate": 1.7284275275053663e-07,
"loss": 0.3792,
"step": 1555
},
{
"epoch": 0.9274673008323424,
"grad_norm": 2.127097574037306,
"learning_rate": 1.59572436219973e-07,
"loss": 0.3542,
"step": 1560
},
{
"epoch": 0.9304399524375743,
"grad_norm": 2.3260445722240015,
"learning_rate": 1.4682384477919808e-07,
"loss": 0.3675,
"step": 1565
},
{
"epoch": 0.9334126040428062,
"grad_norm": 2.39014963013141,
"learning_rate": 1.345983525340988e-07,
"loss": 0.3667,
"step": 1570
},
{
"epoch": 0.9363852556480381,
"grad_norm": 2.2694068787756247,
"learning_rate": 1.228972772083592e-07,
"loss": 0.37,
"step": 1575
},
{
"epoch": 0.93935790725327,
"grad_norm": 2.3620320947289604,
"learning_rate": 1.1172188000142803e-07,
"loss": 0.3727,
"step": 1580
},
{
"epoch": 0.9423305588585018,
"grad_norm": 2.187390021455131,
"learning_rate": 1.0107336545258084e-07,
"loss": 0.3587,
"step": 1585
},
{
"epoch": 0.9453032104637337,
"grad_norm": 2.2100838643930034,
"learning_rate": 9.095288131108937e-08,
"loss": 0.3673,
"step": 1590
},
{
"epoch": 0.9482758620689655,
"grad_norm": 2.293416030314438,
"learning_rate": 8.136151841250938e-08,
"loss": 0.3783,
"step": 1595
},
{
"epoch": 0.9512485136741974,
"grad_norm": 2.178738314835226,
"learning_rate": 7.230031056110864e-08,
"loss": 0.3731,
"step": 1600
},
{
"epoch": 0.9542211652794292,
"grad_norm": 2.2670575062833165,
"learning_rate": 6.377023441843599e-08,
"loss": 0.373,
"step": 1605
},
{
"epoch": 0.9571938168846611,
"grad_norm": 2.1567401585206514,
"learning_rate": 5.577220939805117e-08,
"loss": 0.3699,
"step": 1610
},
{
"epoch": 0.960166468489893,
"grad_norm": 2.0789730650126,
"learning_rate": 4.830709756642904e-08,
"loss": 0.366,
"step": 1615
},
{
"epoch": 0.9631391200951248,
"grad_norm": 2.157814350971754,
"learning_rate": 4.1375703550038394e-08,
"loss": 0.3633,
"step": 1620
},
{
"epoch": 0.9661117717003567,
"grad_norm": 2.1100943054104344,
"learning_rate": 3.4978774448617414e-08,
"loss": 0.3519,
"step": 1625
},
{
"epoch": 0.9690844233055886,
"grad_norm": 2.2083883883509743,
"learning_rate": 2.9116999754646437e-08,
"loss": 0.3613,
"step": 1630
},
{
"epoch": 0.9720570749108205,
"grad_norm": 2.2428508865311527,
"learning_rate": 2.3791011279033492e-08,
"loss": 0.3644,
"step": 1635
},
{
"epoch": 0.9750297265160524,
"grad_norm": 2.1651670922102872,
"learning_rate": 1.9001383083011028e-08,
"loss": 0.3662,
"step": 1640
},
{
"epoch": 0.9780023781212842,
"grad_norm": 2.3186672122830716,
"learning_rate": 1.4748631416262593e-08,
"loss": 0.3606,
"step": 1645
},
{
"epoch": 0.9809750297265161,
"grad_norm": 2.2616261488795617,
"learning_rate": 1.103321466128071e-08,
"loss": 0.3603,
"step": 1650
},
{
"epoch": 0.9839476813317479,
"grad_norm": 2.277277885300596,
"learning_rate": 7.855533283955829e-09,
"loss": 0.3657,
"step": 1655
},
{
"epoch": 0.9869203329369798,
"grad_norm": 2.1694357282951566,
"learning_rate": 5.215929790416407e-09,
"loss": 0.363,
"step": 1660
},
{
"epoch": 0.9898929845422116,
"grad_norm": 2.2318921404664795,
"learning_rate": 3.1146886901090024e-09,
"loss": 0.3603,
"step": 1665
},
{
"epoch": 0.9928656361474435,
"grad_norm": 2.209506235328767,
"learning_rate": 1.5520364651344655e-09,
"loss": 0.3624,
"step": 1670
},
{
"epoch": 0.9958382877526754,
"grad_norm": 2.184944707544881,
"learning_rate": 5.281415458346928e-10,
"loss": 0.3688,
"step": 1675
},
{
"epoch": 0.9988109393579072,
"grad_norm": 2.267638514087637,
"learning_rate": 4.3114292641033286e-11,
"loss": 0.3663,
"step": 1680
},
{
"epoch": 1.0,
"eval_runtime": 3.3713,
"eval_samples_per_second": 2.966,
"eval_steps_per_second": 0.89,
"step": 1682
},
{
"epoch": 1.0,
"step": 1682,
"total_flos": 176088290426880.0,
"train_loss": 0.0,
"train_runtime": 0.008,
"train_samples_per_second": 3378420.528,
"train_steps_per_second": 211190.52
}
],
"logging_steps": 5,
"max_steps": 1682,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 176088290426880.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}