{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.03578009553285507,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 3.578009553285507e-05,
"grad_norm": 1.4940814971923828,
"learning_rate": 0.00019999950652018584,
"loss": 1.524,
"step": 1
},
{
"epoch": 7.156019106571014e-05,
"grad_norm": 3.4283759593963623,
"learning_rate": 0.0001999980260856137,
"loss": 2.3643,
"step": 2
},
{
"epoch": 0.00010734028659856522,
"grad_norm": 1.3139270544052124,
"learning_rate": 0.000199995558710895,
"loss": 0.8935,
"step": 3
},
{
"epoch": 0.00014312038213142028,
"grad_norm": 1.062879204750061,
"learning_rate": 0.00019999210442038162,
"loss": 0.5728,
"step": 4
},
{
"epoch": 0.00017890047766427536,
"grad_norm": 1.8600218296051025,
"learning_rate": 0.00019998766324816607,
"loss": 0.6961,
"step": 5
},
{
"epoch": 0.00021468057319713044,
"grad_norm": 1.5905139446258545,
"learning_rate": 0.0001999822352380809,
"loss": 0.2833,
"step": 6
},
{
"epoch": 0.0002504606687299855,
"grad_norm": 6.56783390045166,
"learning_rate": 0.00019997582044369843,
"loss": 0.2166,
"step": 7
},
{
"epoch": 0.00028624076426284056,
"grad_norm": 1.2253773212432861,
"learning_rate": 0.00019996841892833,
"loss": 0.0389,
"step": 8
},
{
"epoch": 0.00032202085979569564,
"grad_norm": 0.7007305026054382,
"learning_rate": 0.00019996003076502565,
"loss": 0.0157,
"step": 9
},
{
"epoch": 0.0003578009553285507,
"grad_norm": 1.0908080339431763,
"learning_rate": 0.00019995065603657316,
"loss": 0.0769,
"step": 10
},
{
"epoch": 0.0003935810508614058,
"grad_norm": 0.3062255382537842,
"learning_rate": 0.0001999402948354973,
"loss": 0.0058,
"step": 11
},
{
"epoch": 0.0004293611463942609,
"grad_norm": 0.5422049164772034,
"learning_rate": 0.00019992894726405893,
"loss": 0.0098,
"step": 12
},
{
"epoch": 0.00046514124192711595,
"grad_norm": 0.9161967635154724,
"learning_rate": 0.000199916613434254,
"loss": 0.0162,
"step": 13
},
{
"epoch": 0.000500921337459971,
"grad_norm": 0.17783205211162567,
"learning_rate": 0.0001999032934678125,
"loss": 0.0023,
"step": 14
},
{
"epoch": 0.000536701432992826,
"grad_norm": 0.27469125390052795,
"learning_rate": 0.00019988898749619702,
"loss": 0.0035,
"step": 15
},
{
"epoch": 0.0005724815285256811,
"grad_norm": 3.371744394302368,
"learning_rate": 0.00019987369566060176,
"loss": 0.0233,
"step": 16
},
{
"epoch": 0.0006082616240585362,
"grad_norm": 0.6902956366539001,
"learning_rate": 0.00019985741811195097,
"loss": 0.0026,
"step": 17
},
{
"epoch": 0.0006440417195913913,
"grad_norm": 0.013953864574432373,
"learning_rate": 0.00019984015501089752,
"loss": 0.0004,
"step": 18
},
{
"epoch": 0.0006798218151242464,
"grad_norm": 0.005650808569043875,
"learning_rate": 0.0001998219065278212,
"loss": 0.0003,
"step": 19
},
{
"epoch": 0.0007156019106571014,
"grad_norm": 0.1283494383096695,
"learning_rate": 0.00019980267284282717,
"loss": 0.0008,
"step": 20
},
{
"epoch": 0.0007513820061899565,
"grad_norm": 0.15634015202522278,
"learning_rate": 0.00019978245414574417,
"loss": 0.0011,
"step": 21
},
{
"epoch": 0.0007871621017228116,
"grad_norm": 0.3101247549057007,
"learning_rate": 0.00019976125063612252,
"loss": 0.001,
"step": 22
},
{
"epoch": 0.0008229421972556667,
"grad_norm": 1.2682957649230957,
"learning_rate": 0.00019973906252323238,
"loss": 0.0052,
"step": 23
},
{
"epoch": 0.0008587222927885217,
"grad_norm": 0.329192578792572,
"learning_rate": 0.0001997158900260614,
"loss": 0.0015,
"step": 24
},
{
"epoch": 0.0008945023883213768,
"grad_norm": 0.8752695322036743,
"learning_rate": 0.0001996917333733128,
"loss": 0.004,
"step": 25
},
{
"epoch": 0.0009302824838542319,
"grad_norm": 0.16207607090473175,
"learning_rate": 0.00019966659280340297,
"loss": 0.0007,
"step": 26
},
{
"epoch": 0.000966062579387087,
"grad_norm": 0.002739664865657687,
"learning_rate": 0.00019964046856445924,
"loss": 0.0001,
"step": 27
},
{
"epoch": 0.001001842674919942,
"grad_norm": 0.8487679958343506,
"learning_rate": 0.00019961336091431727,
"loss": 0.0045,
"step": 28
},
{
"epoch": 0.001037622770452797,
"grad_norm": 0.7072966694831848,
"learning_rate": 0.00019958527012051857,
"loss": 0.0016,
"step": 29
},
{
"epoch": 0.001073402865985652,
"grad_norm": 0.7657275199890137,
"learning_rate": 0.00019955619646030802,
"loss": 0.0024,
"step": 30
},
{
"epoch": 0.0011091829615185072,
"grad_norm": 0.05178741365671158,
"learning_rate": 0.00019952614022063084,
"loss": 0.0003,
"step": 31
},
{
"epoch": 0.0011449630570513623,
"grad_norm": 0.8519219756126404,
"learning_rate": 0.00019949510169813003,
"loss": 0.0044,
"step": 32
},
{
"epoch": 0.0011807431525842173,
"grad_norm": 0.04824286326766014,
"learning_rate": 0.00019946308119914323,
"loss": 0.0012,
"step": 33
},
{
"epoch": 0.0012165232481170724,
"grad_norm": 0.10049160569906235,
"learning_rate": 0.0001994300790396999,
"loss": 0.0013,
"step": 34
},
{
"epoch": 0.0012523033436499275,
"grad_norm": 0.021055705845355988,
"learning_rate": 0.000199396095545518,
"loss": 0.0002,
"step": 35
},
{
"epoch": 0.0012880834391827826,
"grad_norm": 0.79359370470047,
"learning_rate": 0.00019936113105200085,
"loss": 0.0017,
"step": 36
},
{
"epoch": 0.0013238635347156376,
"grad_norm": 0.3737424612045288,
"learning_rate": 0.00019932518590423394,
"loss": 0.0039,
"step": 37
},
{
"epoch": 0.0013596436302484927,
"grad_norm": 0.0020007130224257708,
"learning_rate": 0.00019928826045698136,
"loss": 0.0001,
"step": 38
},
{
"epoch": 0.0013954237257813478,
"grad_norm": 0.0023270663805305958,
"learning_rate": 0.0001992503550746824,
"loss": 0.0001,
"step": 39
},
{
"epoch": 0.0014312038213142029,
"grad_norm": 0.02750846929848194,
"learning_rate": 0.0001992114701314478,
"loss": 0.0004,
"step": 40
},
{
"epoch": 0.001466983916847058,
"grad_norm": 0.015391684137284756,
"learning_rate": 0.0001991716060110563,
"loss": 0.0001,
"step": 41
},
{
"epoch": 0.001502764012379913,
"grad_norm": 2.5219709873199463,
"learning_rate": 0.00019913076310695068,
"loss": 0.0401,
"step": 42
},
{
"epoch": 0.0015385441079127681,
"grad_norm": 0.0812700092792511,
"learning_rate": 0.00019908894182223388,
"loss": 0.0004,
"step": 43
},
{
"epoch": 0.0015743242034456232,
"grad_norm": 0.01355485338717699,
"learning_rate": 0.00019904614256966512,
"loss": 0.0002,
"step": 44
},
{
"epoch": 0.0016101042989784783,
"grad_norm": 0.0012422667350620031,
"learning_rate": 0.00019900236577165576,
"loss": 0.0001,
"step": 45
},
{
"epoch": 0.0016458843945113333,
"grad_norm": 0.0012065384071320295,
"learning_rate": 0.0001989576118602651,
"loss": 0.0001,
"step": 46
},
{
"epoch": 0.0016816644900441884,
"grad_norm": 0.01380125805735588,
"learning_rate": 0.00019891188127719618,
"loss": 0.0006,
"step": 47
},
{
"epoch": 0.0017174445855770435,
"grad_norm": 0.028274118900299072,
"learning_rate": 0.0001988651744737914,
"loss": 0.0004,
"step": 48
},
{
"epoch": 0.0017532246811098986,
"grad_norm": 0.3730854094028473,
"learning_rate": 0.00019881749191102808,
"loss": 0.0046,
"step": 49
},
{
"epoch": 0.0017890047766427537,
"grad_norm": 0.004828768782317638,
"learning_rate": 0.00019876883405951377,
"loss": 0.0001,
"step": 50
},
{
"epoch": 0.0018247848721756087,
"grad_norm": 2.613616943359375,
"learning_rate": 0.00019871920139948192,
"loss": 0.0984,
"step": 51
},
{
"epoch": 0.0018605649677084638,
"grad_norm": 0.00172997429035604,
"learning_rate": 0.0001986685944207868,
"loss": 0.0001,
"step": 52
},
{
"epoch": 0.0018963450632413189,
"grad_norm": 0.0117227453738451,
"learning_rate": 0.0001986170136228989,
"loss": 0.0002,
"step": 53
},
{
"epoch": 0.001932125158774174,
"grad_norm": 0.14388525485992432,
"learning_rate": 0.00019856445951489982,
"loss": 0.0013,
"step": 54
},
{
"epoch": 0.001967905254307029,
"grad_norm": 0.10388373583555222,
"learning_rate": 0.0001985109326154774,
"loss": 0.0008,
"step": 55
},
{
"epoch": 0.002003685349839884,
"grad_norm": 0.0035523439291864634,
"learning_rate": 0.00019845643345292054,
"loss": 0.0002,
"step": 56
},
{
"epoch": 0.002039465445372739,
"grad_norm": 0.006682439241558313,
"learning_rate": 0.00019840096256511398,
"loss": 0.0003,
"step": 57
},
{
"epoch": 0.002075245540905594,
"grad_norm": 0.013288861140608788,
"learning_rate": 0.00019834452049953297,
"loss": 0.0004,
"step": 58
},
{
"epoch": 0.002111025636438449,
"grad_norm": 0.007986829616129398,
"learning_rate": 0.00019828710781323792,
"loss": 0.0003,
"step": 59
},
{
"epoch": 0.002146805731971304,
"grad_norm": 0.022813275456428528,
"learning_rate": 0.0001982287250728689,
"loss": 0.0007,
"step": 60
},
{
"epoch": 0.0021825858275041593,
"grad_norm": 0.017735198140144348,
"learning_rate": 0.0001981693728546399,
"loss": 0.0006,
"step": 61
},
{
"epoch": 0.0022183659230370144,
"grad_norm": 0.031412046402692795,
"learning_rate": 0.0001981090517443334,
"loss": 0.0008,
"step": 62
},
{
"epoch": 0.0022541460185698694,
"grad_norm": 1.2430920600891113,
"learning_rate": 0.00019804776233729444,
"loss": 0.015,
"step": 63
},
{
"epoch": 0.0022899261141027245,
"grad_norm": 0.008373609744012356,
"learning_rate": 0.0001979855052384247,
"loss": 0.0004,
"step": 64
},
{
"epoch": 0.0023257062096355796,
"grad_norm": 0.012491998262703419,
"learning_rate": 0.00019792228106217658,
"loss": 0.0005,
"step": 65
},
{
"epoch": 0.0023614863051684347,
"grad_norm": 2.589193105697632,
"learning_rate": 0.00019785809043254722,
"loss": 0.0061,
"step": 66
},
{
"epoch": 0.0023972664007012898,
"grad_norm": 0.007409463636577129,
"learning_rate": 0.0001977929339830722,
"loss": 0.0003,
"step": 67
},
{
"epoch": 0.002433046496234145,
"grad_norm": 0.1197238564491272,
"learning_rate": 0.00019772681235681936,
"loss": 0.0008,
"step": 68
},
{
"epoch": 0.002468826591767,
"grad_norm": 0.08556798845529556,
"learning_rate": 0.00019765972620638248,
"loss": 0.001,
"step": 69
},
{
"epoch": 0.002504606687299855,
"grad_norm": 0.010016442276537418,
"learning_rate": 0.00019759167619387476,
"loss": 0.0004,
"step": 70
},
{
"epoch": 0.00254038678283271,
"grad_norm": 0.10208962112665176,
"learning_rate": 0.00019752266299092236,
"loss": 0.0015,
"step": 71
},
{
"epoch": 0.002576166878365565,
"grad_norm": 0.00643647788092494,
"learning_rate": 0.00019745268727865774,
"loss": 0.0003,
"step": 72
},
{
"epoch": 0.00261194697389842,
"grad_norm": 0.0046915169805288315,
"learning_rate": 0.0001973817497477129,
"loss": 0.0003,
"step": 73
},
{
"epoch": 0.0026477270694312753,
"grad_norm": 0.25393474102020264,
"learning_rate": 0.00019730985109821266,
"loss": 0.0027,
"step": 74
},
{
"epoch": 0.0026835071649641304,
"grad_norm": 0.007773595862090588,
"learning_rate": 0.00019723699203976766,
"loss": 0.0004,
"step": 75
},
{
"epoch": 0.0027192872604969854,
"grad_norm": 0.004822044633328915,
"learning_rate": 0.0001971631732914674,
"loss": 0.0003,
"step": 76
},
{
"epoch": 0.0027550673560298405,
"grad_norm": 0.003923391457647085,
"learning_rate": 0.0001970883955818731,
"loss": 0.0003,
"step": 77
},
{
"epoch": 0.0027908474515626956,
"grad_norm": 0.016484705731272697,
"learning_rate": 0.0001970126596490106,
"loss": 0.0006,
"step": 78
},
{
"epoch": 0.0028266275470955507,
"grad_norm": 0.046882666647434235,
"learning_rate": 0.00019693596624036292,
"loss": 0.0007,
"step": 79
},
{
"epoch": 0.0028624076426284058,
"grad_norm": 0.0036497802939265966,
"learning_rate": 0.0001968583161128631,
"loss": 0.0002,
"step": 80
},
{
"epoch": 0.002898187738161261,
"grad_norm": 0.0067147244699299335,
"learning_rate": 0.00019677971003288655,
"loss": 0.0003,
"step": 81
},
{
"epoch": 0.002933967833694116,
"grad_norm": 0.00465575372800231,
"learning_rate": 0.00019670014877624353,
"loss": 0.0002,
"step": 82
},
{
"epoch": 0.002969747929226971,
"grad_norm": 0.0572395846247673,
"learning_rate": 0.00019661963312817148,
"loss": 0.001,
"step": 83
},
{
"epoch": 0.003005528024759826,
"grad_norm": 0.006505393423140049,
"learning_rate": 0.0001965381638833274,
"loss": 0.0003,
"step": 84
},
{
"epoch": 0.003041308120292681,
"grad_norm": 0.005373773165047169,
"learning_rate": 0.00019645574184577982,
"loss": 0.0002,
"step": 85
},
{
"epoch": 0.0030770882158255362,
"grad_norm": 0.028440192341804504,
"learning_rate": 0.000196372367829001,
"loss": 0.0003,
"step": 86
},
{
"epoch": 0.0031128683113583913,
"grad_norm": 0.021315911784768105,
"learning_rate": 0.00019628804265585877,
"loss": 0.0005,
"step": 87
},
{
"epoch": 0.0031486484068912464,
"grad_norm": 0.0548127144575119,
"learning_rate": 0.0001962027671586086,
"loss": 0.0004,
"step": 88
},
{
"epoch": 0.0031844285024241015,
"grad_norm": 0.026664718985557556,
"learning_rate": 0.0001961165421788852,
"loss": 0.0004,
"step": 89
},
{
"epoch": 0.0032202085979569565,
"grad_norm": 0.0035014627501368523,
"learning_rate": 0.0001960293685676943,
"loss": 0.0001,
"step": 90
},
{
"epoch": 0.0032559886934898116,
"grad_norm": 2.119551181793213,
"learning_rate": 0.0001959412471854043,
"loss": 0.0222,
"step": 91
},
{
"epoch": 0.0032917687890226667,
"grad_norm": 0.10606224089860916,
"learning_rate": 0.0001958521789017376,
"loss": 0.0009,
"step": 92
},
{
"epoch": 0.0033275488845555218,
"grad_norm": 0.0029525908175855875,
"learning_rate": 0.00019576216459576222,
"loss": 0.0001,
"step": 93
},
{
"epoch": 0.003363328980088377,
"grad_norm": 0.003076421096920967,
"learning_rate": 0.00019567120515588308,
"loss": 0.0001,
"step": 94
},
{
"epoch": 0.003399109075621232,
"grad_norm": 0.021403349936008453,
"learning_rate": 0.00019557930147983302,
"loss": 0.0004,
"step": 95
},
{
"epoch": 0.003434889171154087,
"grad_norm": 0.001615760731510818,
"learning_rate": 0.00019548645447466431,
"loss": 0.0001,
"step": 96
},
{
"epoch": 0.003470669266686942,
"grad_norm": 0.0412115603685379,
"learning_rate": 0.00019539266505673938,
"loss": 0.0002,
"step": 97
},
{
"epoch": 0.003506449362219797,
"grad_norm": 0.606522262096405,
"learning_rate": 0.00019529793415172192,
"loss": 0.0036,
"step": 98
},
{
"epoch": 0.0035422294577526522,
"grad_norm": 7.513233184814453,
"learning_rate": 0.00019520226269456768,
"loss": 0.0428,
"step": 99
},
{
"epoch": 0.0035780095532855073,
"grad_norm": 0.0012378046521916986,
"learning_rate": 0.00019510565162951537,
"loss": 0.0001,
"step": 100
},
{
"epoch": 0.0036137896488183624,
"grad_norm": 0.04065516218543053,
"learning_rate": 0.00019500810191007718,
"loss": 0.0005,
"step": 101
},
{
"epoch": 0.0036495697443512175,
"grad_norm": 0.4646848440170288,
"learning_rate": 0.00019490961449902946,
"loss": 0.0041,
"step": 102
},
{
"epoch": 0.0036853498398840725,
"grad_norm": 0.0042214663699269295,
"learning_rate": 0.0001948101903684032,
"loss": 0.0002,
"step": 103
},
{
"epoch": 0.0037211299354169276,
"grad_norm": 0.3199816346168518,
"learning_rate": 0.00019470983049947444,
"loss": 0.0014,
"step": 104
},
{
"epoch": 0.0037569100309497827,
"grad_norm": 0.0463225431740284,
"learning_rate": 0.00019460853588275454,
"loss": 0.0009,
"step": 105
},
{
"epoch": 0.0037926901264826378,
"grad_norm": 0.31589099764823914,
"learning_rate": 0.00019450630751798048,
"loss": 0.0017,
"step": 106
},
{
"epoch": 0.003828470222015493,
"grad_norm": 0.16430477797985077,
"learning_rate": 0.000194403146414105,
"loss": 0.0012,
"step": 107
},
{
"epoch": 0.003864250317548348,
"grad_norm": 0.8777446746826172,
"learning_rate": 0.00019429905358928646,
"loss": 0.0065,
"step": 108
},
{
"epoch": 0.003900030413081203,
"grad_norm": 0.3933359682559967,
"learning_rate": 0.00019419403007087907,
"loss": 0.0026,
"step": 109
},
{
"epoch": 0.003935810508614058,
"grad_norm": 0.0034663318656384945,
"learning_rate": 0.00019408807689542257,
"loss": 0.0001,
"step": 110
},
{
"epoch": 0.003971590604146913,
"grad_norm": 0.0011069366009905934,
"learning_rate": 0.00019398119510863197,
"loss": 0.0001,
"step": 111
},
{
"epoch": 0.004007370699679768,
"grad_norm": 0.0016587006393820047,
"learning_rate": 0.00019387338576538744,
"loss": 0.0001,
"step": 112
},
{
"epoch": 0.004043150795212623,
"grad_norm": 0.002230650745332241,
"learning_rate": 0.00019376464992972356,
"loss": 0.0001,
"step": 113
},
{
"epoch": 0.004078930890745478,
"grad_norm": 0.000850948563311249,
"learning_rate": 0.00019365498867481923,
"loss": 0.0001,
"step": 114
},
{
"epoch": 0.004114710986278333,
"grad_norm": 0.0022517703473567963,
"learning_rate": 0.00019354440308298675,
"loss": 0.0001,
"step": 115
},
{
"epoch": 0.004150491081811188,
"grad_norm": 0.5699942708015442,
"learning_rate": 0.00019343289424566122,
"loss": 0.0051,
"step": 116
},
{
"epoch": 0.004186271177344043,
"grad_norm": 0.1059614047408104,
"learning_rate": 0.00019332046326338986,
"loss": 0.0007,
"step": 117
},
{
"epoch": 0.004222051272876898,
"grad_norm": 0.001039605587720871,
"learning_rate": 0.0001932071112458211,
"loss": 0.0001,
"step": 118
},
{
"epoch": 0.004257831368409753,
"grad_norm": 0.004336313344538212,
"learning_rate": 0.00019309283931169356,
"loss": 0.0001,
"step": 119
},
{
"epoch": 0.004293611463942608,
"grad_norm": 0.683163583278656,
"learning_rate": 0.00019297764858882514,
"loss": 0.0033,
"step": 120
},
{
"epoch": 0.0043293915594754635,
"grad_norm": 0.0009113202686421573,
"learning_rate": 0.00019286154021410173,
"loss": 0.0001,
"step": 121
},
{
"epoch": 0.004365171655008319,
"grad_norm": 0.0011711944825947285,
"learning_rate": 0.00019274451533346615,
"loss": 0.0001,
"step": 122
},
{
"epoch": 0.004400951750541174,
"grad_norm": 0.0020785066299140453,
"learning_rate": 0.00019262657510190666,
"loss": 0.0001,
"step": 123
},
{
"epoch": 0.004436731846074029,
"grad_norm": 0.5635930299758911,
"learning_rate": 0.0001925077206834458,
"loss": 0.003,
"step": 124
},
{
"epoch": 0.004472511941606884,
"grad_norm": 0.019610170274972916,
"learning_rate": 0.0001923879532511287,
"loss": 0.0003,
"step": 125
},
{
"epoch": 0.004508292037139739,
"grad_norm": 0.003800542326644063,
"learning_rate": 0.0001922672739870115,
"loss": 0.0001,
"step": 126
},
{
"epoch": 0.004544072132672594,
"grad_norm": 0.0015734516782686114,
"learning_rate": 0.00019214568408214985,
"loss": 0.0001,
"step": 127
},
{
"epoch": 0.004579852228205449,
"grad_norm": 0.16732634603977203,
"learning_rate": 0.00019202318473658705,
"loss": 0.0014,
"step": 128
},
{
"epoch": 0.004615632323738304,
"grad_norm": 0.03286750242114067,
"learning_rate": 0.00019189977715934213,
"loss": 0.0004,
"step": 129
},
{
"epoch": 0.004651412419271159,
"grad_norm": 0.013986490666866302,
"learning_rate": 0.00019177546256839812,
"loss": 0.0003,
"step": 130
},
{
"epoch": 0.004687192514804014,
"grad_norm": 0.0010463737417012453,
"learning_rate": 0.0001916502421906898,
"loss": 0.0001,
"step": 131
},
{
"epoch": 0.004722972610336869,
"grad_norm": 0.05846157670021057,
"learning_rate": 0.00019152411726209176,
"loss": 0.0004,
"step": 132
},
{
"epoch": 0.004758752705869724,
"grad_norm": 0.0012616278836503625,
"learning_rate": 0.00019139708902740613,
"loss": 0.0001,
"step": 133
},
{
"epoch": 0.0047945328014025795,
"grad_norm": 0.002735038986429572,
"learning_rate": 0.0001912691587403503,
"loss": 0.0001,
"step": 134
},
{
"epoch": 0.004830312896935435,
"grad_norm": 0.05025576055049896,
"learning_rate": 0.00019114032766354453,
"loss": 0.0004,
"step": 135
},
{
"epoch": 0.00486609299246829,
"grad_norm": 0.00338102038949728,
"learning_rate": 0.00019101059706849957,
"loss": 0.0002,
"step": 136
},
{
"epoch": 0.004901873088001145,
"grad_norm": 0.016932694241404533,
"learning_rate": 0.00019087996823560402,
"loss": 0.0003,
"step": 137
},
{
"epoch": 0.004937653183534,
"grad_norm": 0.02689458057284355,
"learning_rate": 0.0001907484424541117,
"loss": 0.0004,
"step": 138
},
{
"epoch": 0.004973433279066855,
"grad_norm": 0.4155731201171875,
"learning_rate": 0.00019061602102212898,
"loss": 0.0043,
"step": 139
},
{
"epoch": 0.00500921337459971,
"grad_norm": 0.10929184406995773,
"learning_rate": 0.00019048270524660196,
"loss": 0.0005,
"step": 140
},
{
"epoch": 0.005044993470132565,
"grad_norm": 0.011052286252379417,
"learning_rate": 0.0001903484964433035,
"loss": 0.0001,
"step": 141
},
{
"epoch": 0.00508077356566542,
"grad_norm": 0.01035118568688631,
"learning_rate": 0.00019021339593682028,
"loss": 0.0003,
"step": 142
},
{
"epoch": 0.005116553661198275,
"grad_norm": 0.019157446920871735,
"learning_rate": 0.00019007740506053983,
"loss": 0.0003,
"step": 143
},
{
"epoch": 0.00515233375673113,
"grad_norm": 0.0006221545045264065,
"learning_rate": 0.0001899405251566371,
"loss": 0.0,
"step": 144
},
{
"epoch": 0.005188113852263985,
"grad_norm": 0.027849700301885605,
"learning_rate": 0.00018980275757606157,
"loss": 0.0003,
"step": 145
},
{
"epoch": 0.00522389394779684,
"grad_norm": 0.001835379982367158,
"learning_rate": 0.00018966410367852362,
"loss": 0.0001,
"step": 146
},
{
"epoch": 0.0052596740433296955,
"grad_norm": 1.2824780941009521,
"learning_rate": 0.00018952456483248119,
"loss": 0.0366,
"step": 147
},
{
"epoch": 0.005295454138862551,
"grad_norm": 0.025991423055529594,
"learning_rate": 0.0001893841424151264,
"loss": 0.0004,
"step": 148
},
{
"epoch": 0.005331234234395406,
"grad_norm": 0.0042706448584795,
"learning_rate": 0.0001892428378123718,
"loss": 0.0002,
"step": 149
},
{
"epoch": 0.005367014329928261,
"grad_norm": 0.0003646000404842198,
"learning_rate": 0.0001891006524188368,
"loss": 0.0,
"step": 150
},
{
"epoch": 0.005402794425461116,
"grad_norm": 0.002723448909819126,
"learning_rate": 0.00018895758763783383,
"loss": 0.0002,
"step": 151
},
{
"epoch": 0.005438574520993971,
"grad_norm": 0.0007927878759801388,
"learning_rate": 0.00018881364488135448,
"loss": 0.0001,
"step": 152
},
{
"epoch": 0.005474354616526826,
"grad_norm": 0.0023497738875448704,
"learning_rate": 0.00018866882557005567,
"loss": 0.0001,
"step": 153
},
{
"epoch": 0.005510134712059681,
"grad_norm": 0.00858394242823124,
"learning_rate": 0.00018852313113324552,
"loss": 0.0002,
"step": 154
},
{
"epoch": 0.005545914807592536,
"grad_norm": 0.02338087372481823,
"learning_rate": 0.00018837656300886937,
"loss": 0.0003,
"step": 155
},
{
"epoch": 0.005581694903125391,
"grad_norm": 0.006292372476309538,
"learning_rate": 0.00018822912264349534,
"loss": 0.0002,
"step": 156
},
{
"epoch": 0.005617474998658246,
"grad_norm": 0.1395585685968399,
"learning_rate": 0.00018808081149230036,
"loss": 0.002,
"step": 157
},
{
"epoch": 0.005653255094191101,
"grad_norm": 0.06656046211719513,
"learning_rate": 0.00018793163101905563,
"loss": 0.0005,
"step": 158
},
{
"epoch": 0.0056890351897239564,
"grad_norm": 0.005269424058496952,
"learning_rate": 0.00018778158269611218,
"loss": 0.0001,
"step": 159
},
{
"epoch": 0.0057248152852568115,
"grad_norm": 0.005869029089808464,
"learning_rate": 0.00018763066800438636,
"loss": 0.0001,
"step": 160
},
{
"epoch": 0.005760595380789667,
"grad_norm": 0.007193957921117544,
"learning_rate": 0.0001874788884333453,
"loss": 0.0002,
"step": 161
},
{
"epoch": 0.005796375476322522,
"grad_norm": 0.23244933784008026,
"learning_rate": 0.00018732624548099204,
"loss": 0.0015,
"step": 162
},
{
"epoch": 0.005832155571855377,
"grad_norm": 0.0010494483867660165,
"learning_rate": 0.0001871727406538509,
"loss": 0.0001,
"step": 163
},
{
"epoch": 0.005867935667388232,
"grad_norm": 0.001451204065233469,
"learning_rate": 0.0001870183754669526,
"loss": 0.0001,
"step": 164
},
{
"epoch": 0.005903715762921087,
"grad_norm": 0.0035805683583021164,
"learning_rate": 0.00018686315144381913,
"loss": 0.0001,
"step": 165
},
{
"epoch": 0.005939495858453942,
"grad_norm": 0.003529100678861141,
"learning_rate": 0.000186707070116449,
"loss": 0.0002,
"step": 166
},
{
"epoch": 0.005975275953986797,
"grad_norm": 0.0062582106329500675,
"learning_rate": 0.0001865501330253019,
"loss": 0.0002,
"step": 167
},
{
"epoch": 0.006011056049519652,
"grad_norm": 2.6403636932373047,
"learning_rate": 0.00018639234171928353,
"loss": 0.0772,
"step": 168
},
{
"epoch": 0.006046836145052507,
"grad_norm": 3.0866150856018066,
"learning_rate": 0.0001862336977557304,
"loss": 0.0508,
"step": 169
},
{
"epoch": 0.006082616240585362,
"grad_norm": 0.00488316873088479,
"learning_rate": 0.0001860742027003944,
"loss": 0.0002,
"step": 170
},
{
"epoch": 0.006118396336118217,
"grad_norm": 0.005987066309899092,
"learning_rate": 0.00018591385812742725,
"loss": 0.0001,
"step": 171
},
{
"epoch": 0.0061541764316510724,
"grad_norm": 0.002021647058427334,
"learning_rate": 0.00018575266561936523,
"loss": 0.0001,
"step": 172
},
{
"epoch": 0.0061899565271839275,
"grad_norm": 0.005465734284371138,
"learning_rate": 0.00018559062676711332,
"loss": 0.0001,
"step": 173
},
{
"epoch": 0.006225736622716783,
"grad_norm": 0.04289950057864189,
"learning_rate": 0.0001854277431699295,
"loss": 0.0003,
"step": 174
},
{
"epoch": 0.006261516718249638,
"grad_norm": 0.0013694529188796878,
"learning_rate": 0.00018526401643540922,
"loss": 0.0001,
"step": 175
},
{
"epoch": 0.006297296813782493,
"grad_norm": 0.005164470989257097,
"learning_rate": 0.00018509944817946922,
"loss": 0.0001,
"step": 176
},
{
"epoch": 0.006333076909315348,
"grad_norm": 0.006279651075601578,
"learning_rate": 0.00018493404002633166,
"loss": 0.0001,
"step": 177
},
{
"epoch": 0.006368857004848203,
"grad_norm": 0.01261226087808609,
"learning_rate": 0.00018476779360850832,
"loss": 0.0002,
"step": 178
},
{
"epoch": 0.006404637100381058,
"grad_norm": 0.0049860430881381035,
"learning_rate": 0.00018460071056678422,
"loss": 0.0001,
"step": 179
},
{
"epoch": 0.006440417195913913,
"grad_norm": 0.06854995340108871,
"learning_rate": 0.00018443279255020152,
"loss": 0.0007,
"step": 180
},
{
"epoch": 0.006476197291446768,
"grad_norm": 0.0030735009349882603,
"learning_rate": 0.00018426404121604323,
"loss": 0.0001,
"step": 181
},
{
"epoch": 0.006511977386979623,
"grad_norm": 0.06162787973880768,
"learning_rate": 0.00018409445822981693,
"loss": 0.0008,
"step": 182
},
{
"epoch": 0.006547757482512478,
"grad_norm": 1.4743658304214478,
"learning_rate": 0.00018392404526523817,
"loss": 0.036,
"step": 183
},
{
"epoch": 0.006583537578045333,
"grad_norm": 0.0014406999107450247,
"learning_rate": 0.0001837528040042142,
"loss": 0.0001,
"step": 184
},
{
"epoch": 0.0066193176735781884,
"grad_norm": 0.001721672248095274,
"learning_rate": 0.00018358073613682706,
"loss": 0.0001,
"step": 185
},
{
"epoch": 0.0066550977691110435,
"grad_norm": 0.007810389623045921,
"learning_rate": 0.00018340784336131713,
"loss": 0.0003,
"step": 186
},
{
"epoch": 0.006690877864643899,
"grad_norm": 0.007608585525304079,
"learning_rate": 0.00018323412738406635,
"loss": 0.0003,
"step": 187
},
{
"epoch": 0.006726657960176754,
"grad_norm": 0.007065481040626764,
"learning_rate": 0.00018305958991958127,
"loss": 0.0003,
"step": 188
},
{
"epoch": 0.006762438055709609,
"grad_norm": 0.03384295850992203,
"learning_rate": 0.0001828842326904762,
"loss": 0.0006,
"step": 189
},
{
"epoch": 0.006798218151242464,
"grad_norm": 0.3284815847873688,
"learning_rate": 0.00018270805742745617,
"loss": 0.0059,
"step": 190
},
{
"epoch": 0.006833998246775319,
"grad_norm": 0.029190680012106895,
"learning_rate": 0.00018253106586929997,
"loss": 0.0006,
"step": 191
},
{
"epoch": 0.006869778342308174,
"grad_norm": 0.9759858846664429,
"learning_rate": 0.00018235325976284275,
"loss": 0.147,
"step": 192
},
{
"epoch": 0.006905558437841029,
"grad_norm": 0.02130061574280262,
"learning_rate": 0.00018217464086295904,
"loss": 0.0008,
"step": 193
},
{
"epoch": 0.006941338533373884,
"grad_norm": 0.011916525661945343,
"learning_rate": 0.00018199521093254523,
"loss": 0.0005,
"step": 194
},
{
"epoch": 0.006977118628906739,
"grad_norm": 0.010565850883722305,
"learning_rate": 0.00018181497174250236,
"loss": 0.0006,
"step": 195
},
{
"epoch": 0.007012898724439594,
"grad_norm": 0.022635890170931816,
"learning_rate": 0.00018163392507171842,
"loss": 0.0013,
"step": 196
},
{
"epoch": 0.007048678819972449,
"grad_norm": 1.2287172079086304,
"learning_rate": 0.00018145207270705096,
"loss": 0.0092,
"step": 197
},
{
"epoch": 0.0070844589155053045,
"grad_norm": 0.27400410175323486,
"learning_rate": 0.0001812694164433094,
"loss": 0.0099,
"step": 198
},
{
"epoch": 0.0071202390110381595,
"grad_norm": 0.014746950939297676,
"learning_rate": 0.00018108595808323736,
"loss": 0.0006,
"step": 199
},
{
"epoch": 0.007156019106571015,
"grad_norm": 0.13460226356983185,
"learning_rate": 0.00018090169943749476,
"loss": 0.0215,
"step": 200
},
{
"epoch": 0.00719179920210387,
"grad_norm": 0.0137384794652462,
"learning_rate": 0.00018071664232464002,
"loss": 0.001,
"step": 201
},
{
"epoch": 0.007227579297636725,
"grad_norm": 0.47047391533851624,
"learning_rate": 0.0001805307885711122,
"loss": 0.0075,
"step": 202
},
{
"epoch": 0.00726335939316958,
"grad_norm": 0.019856106489896774,
"learning_rate": 0.00018034414001121278,
"loss": 0.0011,
"step": 203
},
{
"epoch": 0.007299139488702435,
"grad_norm": 0.01606709137558937,
"learning_rate": 0.00018015669848708767,
"loss": 0.0012,
"step": 204
},
{
"epoch": 0.00733491958423529,
"grad_norm": 0.01651693508028984,
"learning_rate": 0.00017996846584870908,
"loss": 0.0008,
"step": 205
},
{
"epoch": 0.007370699679768145,
"grad_norm": 0.009905004873871803,
"learning_rate": 0.0001797794439538571,
"loss": 0.0007,
"step": 206
},
{
"epoch": 0.007406479775301,
"grad_norm": 0.013038737699389458,
"learning_rate": 0.0001795896346681016,
"loss": 0.0008,
"step": 207
},
{
"epoch": 0.007442259870833855,
"grad_norm": 0.009600020945072174,
"learning_rate": 0.00017939903986478355,
"loss": 0.0007,
"step": 208
},
{
"epoch": 0.00747803996636671,
"grad_norm": 0.005098757334053516,
"learning_rate": 0.00017920766142499672,
"loss": 0.0004,
"step": 209
},
{
"epoch": 0.007513820061899565,
"grad_norm": 0.005413730163127184,
"learning_rate": 0.00017901550123756906,
"loss": 0.0004,
"step": 210
},
{
"epoch": 0.0075496001574324205,
"grad_norm": 0.005524770822376013,
"learning_rate": 0.00017882256119904403,
"loss": 0.0004,
"step": 211
},
{
"epoch": 0.0075853802529652755,
"grad_norm": 0.02472483552992344,
"learning_rate": 0.00017862884321366188,
"loss": 0.0009,
"step": 212
},
{
"epoch": 0.007621160348498131,
"grad_norm": 0.004835169296711683,
"learning_rate": 0.000178434349193341,
"loss": 0.0004,
"step": 213
},
{
"epoch": 0.007656940444030986,
"grad_norm": 0.005667371209710836,
"learning_rate": 0.0001782390810576588,
"loss": 0.0003,
"step": 214
},
{
"epoch": 0.007692720539563841,
"grad_norm": 0.005504743196070194,
"learning_rate": 0.000178043040733833,
"loss": 0.0003,
"step": 215
},
{
"epoch": 0.007728500635096696,
"grad_norm": 0.004330215975642204,
"learning_rate": 0.00017784623015670238,
"loss": 0.0003,
"step": 216
},
{
"epoch": 0.007764280730629551,
"grad_norm": 0.008505130186676979,
"learning_rate": 0.00017764865126870786,
"loss": 0.0006,
"step": 217
},
{
"epoch": 0.007800060826162406,
"grad_norm": 0.005010291468352079,
"learning_rate": 0.00017745030601987337,
"loss": 0.0003,
"step": 218
},
{
"epoch": 0.00783584092169526,
"grad_norm": 0.003250179113820195,
"learning_rate": 0.00017725119636778644,
"loss": 0.0002,
"step": 219
},
{
"epoch": 0.007871621017228115,
"grad_norm": 0.00220483704470098,
"learning_rate": 0.00017705132427757895,
"loss": 0.0002,
"step": 220
},
{
"epoch": 0.00790740111276097,
"grad_norm": 0.02156749553978443,
"learning_rate": 0.00017685069172190766,
"loss": 0.0003,
"step": 221
},
{
"epoch": 0.007943181208293825,
"grad_norm": 0.00591958686709404,
"learning_rate": 0.00017664930068093498,
"loss": 0.0002,
"step": 222
},
{
"epoch": 0.00797896130382668,
"grad_norm": 0.01073639839887619,
"learning_rate": 0.00017644715314230918,
"loss": 0.0004,
"step": 223
},
{
"epoch": 0.008014741399359536,
"grad_norm": 0.007549067493528128,
"learning_rate": 0.0001762442511011448,
"loss": 0.0002,
"step": 224
},
{
"epoch": 0.00805052149489239,
"grad_norm": 0.0013972257729619741,
"learning_rate": 0.0001760405965600031,
"loss": 0.0001,
"step": 225
},
{
"epoch": 0.008086301590425246,
"grad_norm": 0.06953338533639908,
"learning_rate": 0.0001758361915288722,
"loss": 0.0007,
"step": 226
},
{
"epoch": 0.0081220816859581,
"grad_norm": 0.0011373666347935796,
"learning_rate": 0.0001756310380251472,
"loss": 0.0001,
"step": 227
},
{
"epoch": 0.008157861781490956,
"grad_norm": 0.0281671229749918,
"learning_rate": 0.00017542513807361037,
"loss": 0.0003,
"step": 228
},
{
"epoch": 0.008193641877023811,
"grad_norm": 0.003383536823093891,
"learning_rate": 0.00017521849370641114,
"loss": 0.0002,
"step": 229
},
{
"epoch": 0.008229421972556666,
"grad_norm": 0.004913358949124813,
"learning_rate": 0.00017501110696304596,
"loss": 0.0002,
"step": 230
},
{
"epoch": 0.008265202068089521,
"grad_norm": 0.0018077875720337033,
"learning_rate": 0.00017480297989033825,
"loss": 0.0001,
"step": 231
},
{
"epoch": 0.008300982163622376,
"grad_norm": 0.0008154757670126855,
"learning_rate": 0.00017459411454241822,
"loss": 0.0001,
"step": 232
},
{
"epoch": 0.008336762259155231,
"grad_norm": 0.4949972927570343,
"learning_rate": 0.00017438451298070252,
"loss": 0.0095,
"step": 233
},
{
"epoch": 0.008372542354688086,
"grad_norm": 0.0014726183144375682,
"learning_rate": 0.00017417417727387394,
"loss": 0.0001,
"step": 234
},
{
"epoch": 0.008408322450220941,
"grad_norm": 0.000985615304671228,
"learning_rate": 0.000173963109497861,
"loss": 0.0001,
"step": 235
},
{
"epoch": 0.008444102545753797,
"grad_norm": 0.0015382058918476105,
"learning_rate": 0.0001737513117358174,
"loss": 0.0001,
"step": 236
},
{
"epoch": 0.008479882641286652,
"grad_norm": 0.001569434185512364,
"learning_rate": 0.0001735387860781016,
"loss": 0.0001,
"step": 237
},
{
"epoch": 0.008515662736819507,
"grad_norm": 0.01702137105166912,
"learning_rate": 0.00017332553462225602,
"loss": 0.0004,
"step": 238
},
{
"epoch": 0.008551442832352362,
"grad_norm": 0.9787658452987671,
"learning_rate": 0.00017311155947298643,
"loss": 0.03,
"step": 239
},
{
"epoch": 0.008587222927885217,
"grad_norm": 0.0007933751912787557,
"learning_rate": 0.00017289686274214118,
"loss": 0.0001,
"step": 240
},
{
"epoch": 0.008623003023418072,
"grad_norm": 0.0029444191604852676,
"learning_rate": 0.0001726814465486903,
"loss": 0.0002,
"step": 241
},
{
"epoch": 0.008658783118950927,
"grad_norm": 0.0021299768704921007,
"learning_rate": 0.0001724653130187047,
"loss": 0.0001,
"step": 242
},
{
"epoch": 0.008694563214483782,
"grad_norm": 0.0013644633581861854,
"learning_rate": 0.00017224846428533499,
"loss": 0.0001,
"step": 243
},
{
"epoch": 0.008730343310016637,
"grad_norm": 0.0023565045557916164,
"learning_rate": 0.0001720309024887907,
"loss": 0.0001,
"step": 244
},
{
"epoch": 0.008766123405549492,
"grad_norm": 0.001884597004391253,
"learning_rate": 0.00017181262977631888,
"loss": 0.0001,
"step": 245
},
{
"epoch": 0.008801903501082347,
"grad_norm": 0.08846484124660492,
"learning_rate": 0.00017159364830218312,
"loss": 0.0008,
"step": 246
},
{
"epoch": 0.008837683596615202,
"grad_norm": 0.06892094761133194,
"learning_rate": 0.00017137396022764214,
"loss": 0.0006,
"step": 247
},
{
"epoch": 0.008873463692148057,
"grad_norm": 0.002805879106745124,
"learning_rate": 0.00017115356772092857,
"loss": 0.0002,
"step": 248
},
{
"epoch": 0.008909243787680913,
"grad_norm": 0.016457632184028625,
"learning_rate": 0.0001709324729572274,
"loss": 0.0003,
"step": 249
},
{
"epoch": 0.008945023883213768,
"grad_norm": 2.7799627780914307,
"learning_rate": 0.00017071067811865476,
"loss": 0.043,
"step": 250
},
{
"epoch": 0.008980803978746623,
"grad_norm": 0.07246087491512299,
"learning_rate": 0.00017048818539423615,
"loss": 0.0016,
"step": 251
},
{
"epoch": 0.009016584074279478,
"grad_norm": 0.003910925704985857,
"learning_rate": 0.00017026499697988493,
"loss": 0.0002,
"step": 252
},
{
"epoch": 0.009052364169812333,
"grad_norm": 0.002456547226756811,
"learning_rate": 0.00017004111507838064,
"loss": 0.0002,
"step": 253
},
{
"epoch": 0.009088144265345188,
"grad_norm": 0.0028989992570132017,
"learning_rate": 0.00016981654189934727,
"loss": 0.0002,
"step": 254
},
{
"epoch": 0.009123924360878043,
"grad_norm": 0.0032434300519526005,
"learning_rate": 0.00016959127965923142,
"loss": 0.0003,
"step": 255
},
{
"epoch": 0.009159704456410898,
"grad_norm": 0.00594690814614296,
"learning_rate": 0.0001693653305812805,
"loss": 0.0004,
"step": 256
},
{
"epoch": 0.009195484551943753,
"grad_norm": 0.038472067564725876,
"learning_rate": 0.00016913869689552064,
"loss": 0.0011,
"step": 257
},
{
"epoch": 0.009231264647476608,
"grad_norm": 0.006935628596693277,
"learning_rate": 0.00016891138083873487,
"loss": 0.0004,
"step": 258
},
{
"epoch": 0.009267044743009463,
"grad_norm": 0.08398015797138214,
"learning_rate": 0.00016868338465444085,
"loss": 0.0015,
"step": 259
},
{
"epoch": 0.009302824838542318,
"grad_norm": 0.04922045022249222,
"learning_rate": 0.00016845471059286887,
"loss": 0.0004,
"step": 260
},
{
"epoch": 0.009338604934075173,
"grad_norm": 0.013643675483763218,
"learning_rate": 0.00016822536091093965,
"loss": 0.0008,
"step": 261
},
{
"epoch": 0.009374385029608029,
"grad_norm": 0.011866009794175625,
"learning_rate": 0.00016799533787224192,
"loss": 0.0003,
"step": 262
},
{
"epoch": 0.009410165125140884,
"grad_norm": 0.00603786064311862,
"learning_rate": 0.00016776464374701025,
"loss": 0.0003,
"step": 263
},
{
"epoch": 0.009445945220673739,
"grad_norm": 0.015595407225191593,
"learning_rate": 0.00016753328081210245,
"loss": 0.0007,
"step": 264
},
{
"epoch": 0.009481725316206594,
"grad_norm": 0.005519995000213385,
"learning_rate": 0.00016730125135097735,
"loss": 0.0004,
"step": 265
},
{
"epoch": 0.009517505411739449,
"grad_norm": 0.013364361599087715,
"learning_rate": 0.000167068557653672,
"loss": 0.0004,
"step": 266
},
{
"epoch": 0.009553285507272304,
"grad_norm": 0.011465894058346748,
"learning_rate": 0.0001668352020167793,
"loss": 0.0005,
"step": 267
},
{
"epoch": 0.009589065602805159,
"grad_norm": 0.0022764878813177347,
"learning_rate": 0.00016660118674342517,
"loss": 0.0002,
"step": 268
},
{
"epoch": 0.009624845698338014,
"grad_norm": 0.04595799744129181,
"learning_rate": 0.00016636651414324587,
"loss": 0.0011,
"step": 269
},
{
"epoch": 0.00966062579387087,
"grad_norm": 0.008233287371695042,
"learning_rate": 0.00016613118653236518,
"loss": 0.0005,
"step": 270
},
{
"epoch": 0.009696405889403724,
"grad_norm": 0.016462009400129318,
"learning_rate": 0.0001658952062333717,
"loss": 0.0004,
"step": 271
},
{
"epoch": 0.00973218598493658,
"grad_norm": 0.08745335787534714,
"learning_rate": 0.00016565857557529566,
"loss": 0.0013,
"step": 272
},
{
"epoch": 0.009767966080469434,
"grad_norm": 0.03373585268855095,
"learning_rate": 0.00016542129689358612,
"loss": 0.0007,
"step": 273
},
{
"epoch": 0.00980374617600229,
"grad_norm": 0.5139638185501099,
"learning_rate": 0.0001651833725300879,
"loss": 0.0155,
"step": 274
},
{
"epoch": 0.009839526271535145,
"grad_norm": 0.006406674161553383,
"learning_rate": 0.00016494480483301836,
"loss": 0.0002,
"step": 275
},
{
"epoch": 0.009875306367068,
"grad_norm": 0.34044501185417175,
"learning_rate": 0.00016470559615694446,
"loss": 0.0352,
"step": 276
},
{
"epoch": 0.009911086462600855,
"grad_norm": 0.0065045482479035854,
"learning_rate": 0.00016446574886275913,
"loss": 0.0004,
"step": 277
},
{
"epoch": 0.00994686655813371,
"grad_norm": 0.06801342964172363,
"learning_rate": 0.00016422526531765846,
"loss": 0.0013,
"step": 278
},
{
"epoch": 0.009982646653666565,
"grad_norm": 0.0377359464764595,
"learning_rate": 0.00016398414789511786,
"loss": 0.0003,
"step": 279
},
{
"epoch": 0.01001842674919942,
"grad_norm": 0.015254302881658077,
"learning_rate": 0.000163742398974869,
"loss": 0.0007,
"step": 280
},
{
"epoch": 0.010054206844732275,
"grad_norm": 0.006927182897925377,
"learning_rate": 0.00016350002094287609,
"loss": 0.0002,
"step": 281
},
{
"epoch": 0.01008998694026513,
"grad_norm": 0.02803247794508934,
"learning_rate": 0.00016325701619131246,
"loss": 0.0006,
"step": 282
},
{
"epoch": 0.010125767035797985,
"grad_norm": 0.01434150617569685,
"learning_rate": 0.00016301338711853693,
"loss": 0.0005,
"step": 283
},
{
"epoch": 0.01016154713133084,
"grad_norm": 0.001818214193917811,
"learning_rate": 0.00016276913612907007,
"loss": 0.0002,
"step": 284
},
{
"epoch": 0.010197327226863695,
"grad_norm": 0.0021695042960345745,
"learning_rate": 0.00016252426563357055,
"loss": 0.0001,
"step": 285
},
{
"epoch": 0.01023310732239655,
"grad_norm": 0.001164208515547216,
"learning_rate": 0.00016227877804881127,
"loss": 0.0001,
"step": 286
},
{
"epoch": 0.010268887417929405,
"grad_norm": 0.002602560678496957,
"learning_rate": 0.00016203267579765563,
"loss": 0.0002,
"step": 287
},
{
"epoch": 0.01030466751346226,
"grad_norm": 0.002262539230287075,
"learning_rate": 0.00016178596130903344,
"loss": 0.0001,
"step": 288
},
{
"epoch": 0.010340447608995116,
"grad_norm": 0.0017323383362963796,
"learning_rate": 0.00016153863701791717,
"loss": 0.0001,
"step": 289
},
{
"epoch": 0.01037622770452797,
"grad_norm": 0.005987819284200668,
"learning_rate": 0.00016129070536529766,
"loss": 0.0003,
"step": 290
},
{
"epoch": 0.010412007800060826,
"grad_norm": 0.002981544006615877,
"learning_rate": 0.00016104216879816026,
"loss": 0.0002,
"step": 291
},
{
"epoch": 0.01044778789559368,
"grad_norm": 0.004128920845687389,
"learning_rate": 0.00016079302976946055,
"loss": 0.0002,
"step": 292
},
{
"epoch": 0.010483567991126536,
"grad_norm": 0.0421392023563385,
"learning_rate": 0.00016054329073810015,
"loss": 0.0006,
"step": 293
},
{
"epoch": 0.010519348086659391,
"grad_norm": 0.007170453667640686,
"learning_rate": 0.00016029295416890248,
"loss": 0.0003,
"step": 294
},
{
"epoch": 0.010555128182192246,
"grad_norm": 0.012681632302701473,
"learning_rate": 0.00016004202253258842,
"loss": 0.0003,
"step": 295
},
{
"epoch": 0.010590908277725101,
"grad_norm": 0.00398437911644578,
"learning_rate": 0.0001597904983057519,
"loss": 0.0002,
"step": 296
},
{
"epoch": 0.010626688373257956,
"grad_norm": 0.02630179561674595,
"learning_rate": 0.00015953838397083552,
"loss": 0.0005,
"step": 297
},
{
"epoch": 0.010662468468790811,
"grad_norm": 0.007436979562044144,
"learning_rate": 0.00015928568201610595,
"loss": 0.0003,
"step": 298
},
{
"epoch": 0.010698248564323666,
"grad_norm": 0.0016087141120806336,
"learning_rate": 0.00015903239493562948,
"loss": 0.0001,
"step": 299
},
{
"epoch": 0.010734028659856521,
"grad_norm": 0.0017188222846016288,
"learning_rate": 0.00015877852522924732,
"loss": 0.0001,
"step": 300
},
{
"epoch": 0.010769808755389377,
"grad_norm": 0.004087434615939856,
"learning_rate": 0.00015852407540255104,
"loss": 0.0002,
"step": 301
},
{
"epoch": 0.010805588850922232,
"grad_norm": 0.25743332505226135,
"learning_rate": 0.00015826904796685762,
"loss": 0.0021,
"step": 302
},
{
"epoch": 0.010841368946455087,
"grad_norm": 0.004327516537159681,
"learning_rate": 0.00015801344543918495,
"loss": 0.0002,
"step": 303
},
{
"epoch": 0.010877149041987942,
"grad_norm": 0.00956784002482891,
"learning_rate": 0.00015775727034222675,
"loss": 0.0002,
"step": 304
},
{
"epoch": 0.010912929137520797,
"grad_norm": 0.0017107793828472495,
"learning_rate": 0.00015750052520432787,
"loss": 0.0001,
"step": 305
},
{
"epoch": 0.010948709233053652,
"grad_norm": 0.011659998446702957,
"learning_rate": 0.0001572432125594591,
"loss": 0.0002,
"step": 306
},
{
"epoch": 0.010984489328586507,
"grad_norm": 0.007189000491052866,
"learning_rate": 0.00015698533494719238,
"loss": 0.0004,
"step": 307
},
{
"epoch": 0.011020269424119362,
"grad_norm": 0.0030206877272576094,
"learning_rate": 0.00015672689491267567,
"loss": 0.0002,
"step": 308
},
{
"epoch": 0.011056049519652217,
"grad_norm": 0.0016745973844081163,
"learning_rate": 0.00015646789500660773,
"loss": 0.0001,
"step": 309
},
{
"epoch": 0.011091829615185072,
"grad_norm": 0.0018627311801537871,
"learning_rate": 0.00015620833778521307,
"loss": 0.0001,
"step": 310
},
{
"epoch": 0.011127609710717927,
"grad_norm": 0.0017725079087540507,
"learning_rate": 0.0001559482258102167,
"loss": 0.0001,
"step": 311
},
{
"epoch": 0.011163389806250782,
"grad_norm": 0.0018159925239160657,
"learning_rate": 0.00015568756164881882,
"loss": 0.0001,
"step": 312
},
{
"epoch": 0.011199169901783637,
"grad_norm": 0.0026159253902733326,
"learning_rate": 0.00015542634787366942,
"loss": 0.0001,
"step": 313
},
{
"epoch": 0.011234949997316493,
"grad_norm": 0.005072129424661398,
"learning_rate": 0.00015516458706284303,
"loss": 0.0002,
"step": 314
},
{
"epoch": 0.011270730092849348,
"grad_norm": 0.009669373743236065,
"learning_rate": 0.0001549022817998132,
"loss": 0.0001,
"step": 315
},
{
"epoch": 0.011306510188382203,
"grad_norm": 0.029667021706700325,
"learning_rate": 0.00015463943467342693,
"loss": 0.0008,
"step": 316
},
{
"epoch": 0.011342290283915058,
"grad_norm": 0.0014934055507183075,
"learning_rate": 0.00015437604827787927,
"loss": 0.0001,
"step": 317
},
{
"epoch": 0.011378070379447913,
"grad_norm": 0.0008770531858317554,
"learning_rate": 0.00015411212521268758,
"loss": 0.0001,
"step": 318
},
{
"epoch": 0.011413850474980768,
"grad_norm": 0.001008510240353644,
"learning_rate": 0.00015384766808266602,
"loss": 0.0001,
"step": 319
},
{
"epoch": 0.011449630570513623,
"grad_norm": 0.6345011591911316,
"learning_rate": 0.00015358267949789966,
"loss": 0.0042,
"step": 320
},
{
"epoch": 0.011485410666046478,
"grad_norm": 0.005814701318740845,
"learning_rate": 0.00015331716207371888,
"loss": 0.0002,
"step": 321
},
{
"epoch": 0.011521190761579333,
"grad_norm": 0.0008782675722613931,
"learning_rate": 0.0001530511184306734,
"loss": 0.0001,
"step": 322
},
{
"epoch": 0.011556970857112188,
"grad_norm": 0.0006713901530019939,
"learning_rate": 0.00015278455119450664,
"loss": 0.0001,
"step": 323
},
{
"epoch": 0.011592750952645043,
"grad_norm": 0.005472630262374878,
"learning_rate": 0.0001525174629961296,
"loss": 0.0002,
"step": 324
},
{
"epoch": 0.011628531048177898,
"grad_norm": 0.005173255689442158,
"learning_rate": 0.0001522498564715949,
"loss": 0.0001,
"step": 325
},
{
"epoch": 0.011664311143710753,
"grad_norm": 0.003191661089658737,
"learning_rate": 0.00015198173426207094,
"loss": 0.0002,
"step": 326
},
{
"epoch": 0.011700091239243609,
"grad_norm": 0.0036652470007538795,
"learning_rate": 0.00015171309901381572,
"loss": 0.0001,
"step": 327
},
{
"epoch": 0.011735871334776464,
"grad_norm": 0.001312348758801818,
"learning_rate": 0.00015144395337815064,
"loss": 0.0001,
"step": 328
},
{
"epoch": 0.011771651430309319,
"grad_norm": 0.0021110977977514267,
"learning_rate": 0.00015117430001143452,
"loss": 0.0001,
"step": 329
},
{
"epoch": 0.011807431525842174,
"grad_norm": 0.004200182855129242,
"learning_rate": 0.00015090414157503714,
"loss": 0.0002,
"step": 330
},
{
"epoch": 0.011843211621375029,
"grad_norm": 0.0005367898265831172,
"learning_rate": 0.00015063348073531324,
"loss": 0.0,
"step": 331
},
{
"epoch": 0.011878991716907884,
"grad_norm": 0.007367909885942936,
"learning_rate": 0.0001503623201635761,
"loss": 0.0002,
"step": 332
},
{
"epoch": 0.011914771812440739,
"grad_norm": 0.013929062522947788,
"learning_rate": 0.000150090662536071,
"loss": 0.0002,
"step": 333
},
{
"epoch": 0.011950551907973594,
"grad_norm": 0.009065449237823486,
"learning_rate": 0.0001498185105339491,
"loss": 0.0001,
"step": 334
},
{
"epoch": 0.01198633200350645,
"grad_norm": 0.00572764640673995,
"learning_rate": 0.00014954586684324078,
"loss": 0.0001,
"step": 335
},
{
"epoch": 0.012022112099039304,
"grad_norm": 0.1300538331270218,
"learning_rate": 0.00014927273415482915,
"loss": 0.0009,
"step": 336
},
{
"epoch": 0.01205789219457216,
"grad_norm": 0.0021966646891087294,
"learning_rate": 0.00014899911516442365,
"loss": 0.0001,
"step": 337
},
{
"epoch": 0.012093672290105014,
"grad_norm": 0.029224226251244545,
"learning_rate": 0.00014872501257253323,
"loss": 0.0002,
"step": 338
},
{
"epoch": 0.01212945238563787,
"grad_norm": 0.0069636073894798756,
"learning_rate": 0.0001484504290844398,
"loss": 0.0002,
"step": 339
},
{
"epoch": 0.012165232481170725,
"grad_norm": 0.0027267655823379755,
"learning_rate": 0.00014817536741017152,
"loss": 0.0001,
"step": 340
},
{
"epoch": 0.01220101257670358,
"grad_norm": 0.0007624703575856984,
"learning_rate": 0.00014789983026447612,
"loss": 0.0001,
"step": 341
},
{
"epoch": 0.012236792672236435,
"grad_norm": 0.000985614606179297,
"learning_rate": 0.0001476238203667939,
"loss": 0.0001,
"step": 342
},
{
"epoch": 0.01227257276776929,
"grad_norm": 0.0015349843306466937,
"learning_rate": 0.0001473473404412312,
"loss": 0.0001,
"step": 343
},
{
"epoch": 0.012308352863302145,
"grad_norm": 0.21670787036418915,
"learning_rate": 0.0001470703932165333,
"loss": 0.003,
"step": 344
},
{
"epoch": 0.012344132958835,
"grad_norm": 0.0010241931304335594,
"learning_rate": 0.00014679298142605734,
"loss": 0.0001,
"step": 345
},
{
"epoch": 0.012379913054367855,
"grad_norm": 0.0006662398809567094,
"learning_rate": 0.00014651510780774583,
"loss": 0.0001,
"step": 346
},
{
"epoch": 0.01241569314990071,
"grad_norm": 0.001260753022506833,
"learning_rate": 0.00014623677510409918,
"loss": 0.0001,
"step": 347
},
{
"epoch": 0.012451473245433565,
"grad_norm": 0.0011193427490070462,
"learning_rate": 0.00014595798606214882,
"loss": 0.0001,
"step": 348
},
{
"epoch": 0.01248725334096642,
"grad_norm": 0.0006154956645332277,
"learning_rate": 0.00014567874343342997,
"loss": 0.0001,
"step": 349
},
{
"epoch": 0.012523033436499275,
"grad_norm": 0.0007212765631265938,
"learning_rate": 0.00014539904997395468,
"loss": 0.0001,
"step": 350
},
{
"epoch": 0.01255881353203213,
"grad_norm": 0.010549906641244888,
"learning_rate": 0.00014511890844418453,
"loss": 0.0003,
"step": 351
},
{
"epoch": 0.012594593627564986,
"grad_norm": 0.0005157431005500257,
"learning_rate": 0.00014483832160900326,
"loss": 0.0,
"step": 352
},
{
"epoch": 0.01263037372309784,
"grad_norm": 0.00042760182986967266,
"learning_rate": 0.00014455729223768966,
"loss": 0.0001,
"step": 353
},
{
"epoch": 0.012666153818630696,
"grad_norm": 0.004275426734238863,
"learning_rate": 0.0001442758231038902,
"loss": 0.0001,
"step": 354
},
{
"epoch": 0.01270193391416355,
"grad_norm": 0.00281893671490252,
"learning_rate": 0.00014399391698559152,
"loss": 0.0001,
"step": 355
},
{
"epoch": 0.012737714009696406,
"grad_norm": 0.0008448857697658241,
"learning_rate": 0.0001437115766650933,
"loss": 0.0001,
"step": 356
},
{
"epoch": 0.012773494105229261,
"grad_norm": 0.3817155361175537,
"learning_rate": 0.00014342880492898048,
"loss": 0.0024,
"step": 357
},
{
"epoch": 0.012809274200762116,
"grad_norm": 0.0003617767943069339,
"learning_rate": 0.0001431456045680959,
"loss": 0.0,
"step": 358
},
{
"epoch": 0.012845054296294971,
"grad_norm": 0.01476916391402483,
"learning_rate": 0.00014286197837751286,
"loss": 0.0002,
"step": 359
},
{
"epoch": 0.012880834391827826,
"grad_norm": 0.00045559153659269214,
"learning_rate": 0.00014257792915650728,
"loss": 0.0,
"step": 360
},
{
"epoch": 0.012916614487360681,
"grad_norm": 0.0004463440563995391,
"learning_rate": 0.00014229345970853032,
"loss": 0.0,
"step": 361
},
{
"epoch": 0.012952394582893536,
"grad_norm": 0.0008974383235909045,
"learning_rate": 0.00014200857284118066,
"loss": 0.0001,
"step": 362
},
{
"epoch": 0.012988174678426391,
"grad_norm": 0.0006577485473826528,
"learning_rate": 0.00014172327136617656,
"loss": 0.0001,
"step": 363
},
{
"epoch": 0.013023954773959246,
"grad_norm": 0.00020119256805628538,
"learning_rate": 0.00014143755809932845,
"loss": 0.0,
"step": 364
},
{
"epoch": 0.013059734869492102,
"grad_norm": 0.0012024675961583853,
"learning_rate": 0.00014115143586051088,
"loss": 0.0001,
"step": 365
},
{
"epoch": 0.013095514965024957,
"grad_norm": 0.0014580293791368604,
"learning_rate": 0.00014086490747363493,
"loss": 0.0001,
"step": 366
},
{
"epoch": 0.013131295060557812,
"grad_norm": 0.0003630980208981782,
"learning_rate": 0.00014057797576662,
"loss": 0.0,
"step": 367
},
{
"epoch": 0.013167075156090667,
"grad_norm": 0.0006153484573587775,
"learning_rate": 0.00014029064357136628,
"loss": 0.0,
"step": 368
},
{
"epoch": 0.013202855251623522,
"grad_norm": 0.0015171675477176905,
"learning_rate": 0.00014000291372372647,
"loss": 0.0001,
"step": 369
},
{
"epoch": 0.013238635347156377,
"grad_norm": 0.0003758160746656358,
"learning_rate": 0.00013971478906347806,
"loss": 0.0,
"step": 370
},
{
"epoch": 0.013274415442689232,
"grad_norm": 0.0028044632636010647,
"learning_rate": 0.00013942627243429512,
"loss": 0.0001,
"step": 371
},
{
"epoch": 0.013310195538222087,
"grad_norm": 0.0005744365626014769,
"learning_rate": 0.00013913736668372026,
"loss": 0.0001,
"step": 372
},
{
"epoch": 0.013345975633754942,
"grad_norm": 0.03431812301278114,
"learning_rate": 0.00013884807466313663,
"loss": 0.0003,
"step": 373
},
{
"epoch": 0.013381755729287797,
"grad_norm": 0.002126405481249094,
"learning_rate": 0.00013855839922773968,
"loss": 0.0001,
"step": 374
},
{
"epoch": 0.013417535824820652,
"grad_norm": 0.0006087534129619598,
"learning_rate": 0.000138268343236509,
"loss": 0.0,
"step": 375
},
{
"epoch": 0.013453315920353507,
"grad_norm": 0.0005148574709892273,
"learning_rate": 0.00013797790955218014,
"loss": 0.0,
"step": 376
},
{
"epoch": 0.013489096015886362,
"grad_norm": 0.0026000775396823883,
"learning_rate": 0.00013768710104121627,
"loss": 0.0001,
"step": 377
},
{
"epoch": 0.013524876111419218,
"grad_norm": 0.0016088751144707203,
"learning_rate": 0.00013739592057378003,
"loss": 0.0001,
"step": 378
},
{
"epoch": 0.013560656206952073,
"grad_norm": 0.00028388932696543634,
"learning_rate": 0.0001371043710237051,
"loss": 0.0,
"step": 379
},
{
"epoch": 0.013596436302484928,
"grad_norm": 0.0008413681644015014,
"learning_rate": 0.00013681245526846783,
"loss": 0.0,
"step": 380
},
{
"epoch": 0.013632216398017783,
"grad_norm": 0.0004796941066160798,
"learning_rate": 0.0001365201761891588,
"loss": 0.0,
"step": 381
},
{
"epoch": 0.013667996493550638,
"grad_norm": 0.007888946682214737,
"learning_rate": 0.00013622753667045457,
"loss": 0.0001,
"step": 382
},
{
"epoch": 0.013703776589083493,
"grad_norm": 0.0006594446022063494,
"learning_rate": 0.00013593453960058908,
"loss": 0.0001,
"step": 383
},
{
"epoch": 0.013739556684616348,
"grad_norm": 0.0005899665993638337,
"learning_rate": 0.00013564118787132506,
"loss": 0.0001,
"step": 384
},
{
"epoch": 0.013775336780149203,
"grad_norm": 0.03223516419529915,
"learning_rate": 0.00013534748437792573,
"loss": 0.0003,
"step": 385
},
{
"epoch": 0.013811116875682058,
"grad_norm": 0.0013933213194832206,
"learning_rate": 0.0001350534320191259,
"loss": 0.0001,
"step": 386
},
{
"epoch": 0.013846896971214913,
"grad_norm": 0.0004967997083440423,
"learning_rate": 0.0001347590336971037,
"loss": 0.0,
"step": 387
},
{
"epoch": 0.013882677066747768,
"grad_norm": 0.0020627062767744064,
"learning_rate": 0.0001344642923174517,
"loss": 0.0001,
"step": 388
},
{
"epoch": 0.013918457162280623,
"grad_norm": 0.0016057563479989767,
"learning_rate": 0.00013416921078914835,
"loss": 0.0001,
"step": 389
},
{
"epoch": 0.013954237257813478,
"grad_norm": 0.0008467244333587587,
"learning_rate": 0.00013387379202452917,
"loss": 0.0,
"step": 390
},
{
"epoch": 0.013990017353346334,
"grad_norm": 0.001441538450308144,
"learning_rate": 0.00013357803893925807,
"loss": 0.0001,
"step": 391
},
{
"epoch": 0.014025797448879189,
"grad_norm": 0.0010898974724113941,
"learning_rate": 0.00013328195445229868,
"loss": 0.0001,
"step": 392
},
{
"epoch": 0.014061577544412044,
"grad_norm": 0.0007908979896456003,
"learning_rate": 0.00013298554148588528,
"loss": 0.0001,
"step": 393
},
{
"epoch": 0.014097357639944899,
"grad_norm": 0.0010799920419231057,
"learning_rate": 0.00013268880296549425,
"loss": 0.0001,
"step": 394
},
{
"epoch": 0.014133137735477754,
"grad_norm": 0.0009932275861501694,
"learning_rate": 0.00013239174181981495,
"loss": 0.0001,
"step": 395
},
{
"epoch": 0.014168917831010609,
"grad_norm": 0.00047487238771282136,
"learning_rate": 0.00013209436098072095,
"loss": 0.0,
"step": 396
},
{
"epoch": 0.014204697926543464,
"grad_norm": 1.1907325983047485,
"learning_rate": 0.00013179666338324108,
"loss": 0.0067,
"step": 397
},
{
"epoch": 0.014240478022076319,
"grad_norm": 0.007387212011963129,
"learning_rate": 0.0001314986519655305,
"loss": 0.0001,
"step": 398
},
{
"epoch": 0.014276258117609174,
"grad_norm": 0.0006041564629413188,
"learning_rate": 0.0001312003296688415,
"loss": 0.0001,
"step": 399
},
{
"epoch": 0.01431203821314203,
"grad_norm": 0.005637326277792454,
"learning_rate": 0.00013090169943749476,
"loss": 0.0001,
"step": 400
},
{
"epoch": 0.014347818308674884,
"grad_norm": 0.0007078536436893046,
"learning_rate": 0.0001306027642188501,
"loss": 0.0,
"step": 401
},
{
"epoch": 0.01438359840420774,
"grad_norm": 0.0007561934762634337,
"learning_rate": 0.00013030352696327742,
"loss": 0.0,
"step": 402
},
{
"epoch": 0.014419378499740594,
"grad_norm": 0.0013270349008962512,
"learning_rate": 0.00013000399062412763,
"loss": 0.0,
"step": 403
},
{
"epoch": 0.01445515859527345,
"grad_norm": 0.0005704747745767236,
"learning_rate": 0.0001297041581577035,
"loss": 0.0001,
"step": 404
},
{
"epoch": 0.014490938690806305,
"grad_norm": 0.0007336271810345352,
"learning_rate": 0.0001294040325232304,
"loss": 0.0,
"step": 405
},
{
"epoch": 0.01452671878633916,
"grad_norm": 0.0029871645383536816,
"learning_rate": 0.00012910361668282719,
"loss": 0.0001,
"step": 406
},
{
"epoch": 0.014562498881872015,
"grad_norm": 0.0043711112812161446,
"learning_rate": 0.00012880291360147693,
"loss": 0.0001,
"step": 407
},
{
"epoch": 0.01459827897740487,
"grad_norm": 0.010784786194562912,
"learning_rate": 0.0001285019262469976,
"loss": 0.0001,
"step": 408
},
{
"epoch": 0.014634059072937725,
"grad_norm": 0.006265713833272457,
"learning_rate": 0.00012820065759001293,
"loss": 0.0002,
"step": 409
},
{
"epoch": 0.01466983916847058,
"grad_norm": 0.002279860433191061,
"learning_rate": 0.00012789911060392294,
"loss": 0.0001,
"step": 410
},
{
"epoch": 0.014705619264003435,
"grad_norm": 0.0015534780686721206,
"learning_rate": 0.0001275972882648746,
"loss": 0.0,
"step": 411
},
{
"epoch": 0.01474139935953629,
"grad_norm": 0.0033339038491249084,
"learning_rate": 0.00012729519355173254,
"loss": 0.0001,
"step": 412
},
{
"epoch": 0.014777179455069145,
"grad_norm": 0.05229537934064865,
"learning_rate": 0.00012699282944604967,
"loss": 0.0003,
"step": 413
},
{
"epoch": 0.014812959550602,
"grad_norm": 0.08266580104827881,
"learning_rate": 0.00012669019893203759,
"loss": 0.0008,
"step": 414
},
{
"epoch": 0.014848739646134855,
"grad_norm": 0.0018661010544747114,
"learning_rate": 0.0001263873049965373,
"loss": 0.0001,
"step": 415
},
{
"epoch": 0.01488451974166771,
"grad_norm": 0.0022214509081095457,
"learning_rate": 0.00012608415062898972,
"loss": 0.0001,
"step": 416
},
{
"epoch": 0.014920299837200566,
"grad_norm": 0.05749466270208359,
"learning_rate": 0.000125780738821406,
"loss": 0.0018,
"step": 417
},
{
"epoch": 0.01495607993273342,
"grad_norm": 0.004766398575156927,
"learning_rate": 0.00012547707256833823,
"loss": 0.0001,
"step": 418
},
{
"epoch": 0.014991860028266276,
"grad_norm": 0.009270568378269672,
"learning_rate": 0.00012517315486684972,
"loss": 0.0002,
"step": 419
},
{
"epoch": 0.01502764012379913,
"grad_norm": 0.006138884928077459,
"learning_rate": 0.0001248689887164855,
"loss": 0.0001,
"step": 420
},
{
"epoch": 0.015063420219331986,
"grad_norm": 0.0009792116470634937,
"learning_rate": 0.00012456457711924266,
"loss": 0.0001,
"step": 421
},
{
"epoch": 0.015099200314864841,
"grad_norm": 0.07432135939598083,
"learning_rate": 0.00012425992307954075,
"loss": 0.0013,
"step": 422
},
{
"epoch": 0.015134980410397696,
"grad_norm": 0.02271825633943081,
"learning_rate": 0.0001239550296041922,
"loss": 0.0002,
"step": 423
},
{
"epoch": 0.015170760505930551,
"grad_norm": 0.0027169391978532076,
"learning_rate": 0.00012364989970237248,
"loss": 0.0001,
"step": 424
},
{
"epoch": 0.015206540601463406,
"grad_norm": 0.8292320966720581,
"learning_rate": 0.00012334453638559057,
"loss": 0.02,
"step": 425
},
{
"epoch": 0.015242320696996261,
"grad_norm": 0.002853796351701021,
"learning_rate": 0.00012303894266765908,
"loss": 0.0001,
"step": 426
},
{
"epoch": 0.015278100792529116,
"grad_norm": 0.0020515965297818184,
"learning_rate": 0.00012273312156466464,
"loss": 0.0001,
"step": 427
},
{
"epoch": 0.015313880888061971,
"grad_norm": 0.0009926969651132822,
"learning_rate": 0.00012242707609493814,
"loss": 0.0001,
"step": 428
},
{
"epoch": 0.015349660983594826,
"grad_norm": 0.015119269490242004,
"learning_rate": 0.00012212080927902474,
"loss": 0.0003,
"step": 429
},
{
"epoch": 0.015385441079127682,
"grad_norm": 0.003984940703958273,
"learning_rate": 0.00012181432413965428,
"loss": 0.0001,
"step": 430
},
{
"epoch": 0.015421221174660537,
"grad_norm": 0.0013063902733847499,
"learning_rate": 0.00012150762370171136,
"loss": 0.0001,
"step": 431
},
{
"epoch": 0.015457001270193392,
"grad_norm": 0.01197239849716425,
"learning_rate": 0.00012120071099220549,
"loss": 0.0002,
"step": 432
},
{
"epoch": 0.015492781365726247,
"grad_norm": 0.0015485131880268455,
"learning_rate": 0.00012089358904024117,
"loss": 0.0001,
"step": 433
},
{
"epoch": 0.015528561461259102,
"grad_norm": 0.0016194679774343967,
"learning_rate": 0.00012058626087698814,
"loss": 0.0001,
"step": 434
},
{
"epoch": 0.015564341556791957,
"grad_norm": 0.14436820149421692,
"learning_rate": 0.00012027872953565125,
"loss": 0.0013,
"step": 435
},
{
"epoch": 0.015600121652324812,
"grad_norm": 0.01180162001401186,
"learning_rate": 0.00011997099805144069,
"loss": 0.0002,
"step": 436
},
{
"epoch": 0.015635901747857665,
"grad_norm": 0.0005493052303791046,
"learning_rate": 0.000119663069461542,
"loss": 0.0,
"step": 437
},
{
"epoch": 0.01567168184339052,
"grad_norm": 0.0012306346325203776,
"learning_rate": 0.00011935494680508606,
"loss": 0.0001,
"step": 438
},
{
"epoch": 0.015707461938923375,
"grad_norm": 0.0011705871438607574,
"learning_rate": 0.00011904663312311901,
"loss": 0.0001,
"step": 439
},
{
"epoch": 0.01574324203445623,
"grad_norm": 0.000773779465816915,
"learning_rate": 0.00011873813145857249,
"loss": 0.0001,
"step": 440
},
{
"epoch": 0.015779022129989086,
"grad_norm": 0.0009600446210242808,
"learning_rate": 0.00011842944485623335,
"loss": 0.0001,
"step": 441
},
{
"epoch": 0.01581480222552194,
"grad_norm": 0.0016839904710650444,
"learning_rate": 0.00011812057636271374,
"loss": 0.0001,
"step": 442
},
{
"epoch": 0.015850582321054796,
"grad_norm": 0.0010681917192414403,
"learning_rate": 0.000117811529026421,
"loss": 0.0001,
"step": 443
},
{
"epoch": 0.01588636241658765,
"grad_norm": 0.0053033060394227505,
"learning_rate": 0.00011750230589752762,
"loss": 0.0001,
"step": 444
},
{
"epoch": 0.015922142512120506,
"grad_norm": 0.001242525060661137,
"learning_rate": 0.00011719291002794096,
"loss": 0.0001,
"step": 445
},
{
"epoch": 0.01595792260765336,
"grad_norm": 0.8490289449691772,
"learning_rate": 0.00011688334447127338,
"loss": 0.0311,
"step": 446
},
{
"epoch": 0.015993702703186216,
"grad_norm": 0.0019403910264372826,
"learning_rate": 0.00011657361228281199,
"loss": 0.0001,
"step": 447
},
{
"epoch": 0.01602948279871907,
"grad_norm": 0.0009070538799278438,
"learning_rate": 0.00011626371651948838,
"loss": 0.0001,
"step": 448
},
{
"epoch": 0.016065262894251926,
"grad_norm": 0.0023522900883108377,
"learning_rate": 0.00011595366023984864,
"loss": 0.0001,
"step": 449
},
{
"epoch": 0.01610104298978478,
"grad_norm": 0.005850024987012148,
"learning_rate": 0.0001156434465040231,
"loss": 0.0002,
"step": 450
},
{
"epoch": 0.016136823085317636,
"grad_norm": 0.0020392134319990873,
"learning_rate": 0.00011533307837369607,
"loss": 0.0001,
"step": 451
},
{
"epoch": 0.01617260318085049,
"grad_norm": 0.0016762950690463185,
"learning_rate": 0.00011502255891207572,
"loss": 0.0001,
"step": 452
},
{
"epoch": 0.016208383276383347,
"grad_norm": 0.015178056433796883,
"learning_rate": 0.00011471189118386375,
"loss": 0.0003,
"step": 453
},
{
"epoch": 0.0162441633719162,
"grad_norm": 0.015929946675896645,
"learning_rate": 0.00011440107825522521,
"loss": 0.0004,
"step": 454
},
{
"epoch": 0.016279943467449057,
"grad_norm": 0.00879756174981594,
"learning_rate": 0.00011409012319375827,
"loss": 0.0003,
"step": 455
},
{
"epoch": 0.016315723562981912,
"grad_norm": 0.0025617824867367744,
"learning_rate": 0.0001137790290684638,
"loss": 0.0001,
"step": 456
},
{
"epoch": 0.016351503658514767,
"grad_norm": 0.004707003477960825,
"learning_rate": 0.00011346779894971527,
"loss": 0.0002,
"step": 457
},
{
"epoch": 0.016387283754047622,
"grad_norm": 0.002390863373875618,
"learning_rate": 0.00011315643590922827,
"loss": 0.0001,
"step": 458
},
{
"epoch": 0.016423063849580477,
"grad_norm": 0.00811726227402687,
"learning_rate": 0.0001128449430200303,
"loss": 0.0002,
"step": 459
},
{
"epoch": 0.016458843945113332,
"grad_norm": 0.004762257914990187,
"learning_rate": 0.00011253332335643043,
"loss": 0.0002,
"step": 460
},
{
"epoch": 0.016494624040646187,
"grad_norm": 0.0016065690433606505,
"learning_rate": 0.00011222157999398895,
"loss": 0.0001,
"step": 461
},
{
"epoch": 0.016530404136179042,
"grad_norm": 0.007567454129457474,
"learning_rate": 0.00011190971600948699,
"loss": 0.0003,
"step": 462
},
{
"epoch": 0.016566184231711897,
"grad_norm": 0.019168654456734657,
"learning_rate": 0.00011159773448089614,
"loss": 0.0005,
"step": 463
},
{
"epoch": 0.016601964327244752,
"grad_norm": 0.0021388723980635405,
"learning_rate": 0.00011128563848734816,
"loss": 0.0001,
"step": 464
},
{
"epoch": 0.016637744422777608,
"grad_norm": 0.00245377398096025,
"learning_rate": 0.00011097343110910452,
"loss": 0.0001,
"step": 465
},
{
"epoch": 0.016673524518310463,
"grad_norm": 0.023226840421557426,
"learning_rate": 0.000110661115427526,
"loss": 0.0003,
"step": 466
},
{
"epoch": 0.016709304613843318,
"grad_norm": 0.01760197803378105,
"learning_rate": 0.00011034869452504226,
"loss": 0.0004,
"step": 467
},
{
"epoch": 0.016745084709376173,
"grad_norm": 0.0035117343068122864,
"learning_rate": 0.00011003617148512149,
"loss": 0.0001,
"step": 468
},
{
"epoch": 0.016780864804909028,
"grad_norm": 0.010483100078999996,
"learning_rate": 0.00010972354939223996,
"loss": 0.0004,
"step": 469
},
{
"epoch": 0.016816644900441883,
"grad_norm": 0.019027642905712128,
"learning_rate": 0.00010941083133185146,
"loss": 0.0004,
"step": 470
},
{
"epoch": 0.016852424995974738,
"grad_norm": 0.0012662605149671435,
"learning_rate": 0.00010909802039035701,
"loss": 0.0001,
"step": 471
},
{
"epoch": 0.016888205091507593,
"grad_norm": 0.0016224164282903075,
"learning_rate": 0.00010878511965507434,
"loss": 0.0001,
"step": 472
},
{
"epoch": 0.016923985187040448,
"grad_norm": 0.005407401826232672,
"learning_rate": 0.00010847213221420736,
"loss": 0.0002,
"step": 473
},
{
"epoch": 0.016959765282573303,
"grad_norm": 0.003952264320105314,
"learning_rate": 0.00010815906115681578,
"loss": 0.0001,
"step": 474
},
{
"epoch": 0.01699554537810616,
"grad_norm": 0.0009798280661925673,
"learning_rate": 0.0001078459095727845,
"loss": 0.0001,
"step": 475
},
{
"epoch": 0.017031325473639013,
"grad_norm": 0.0017176421824842691,
"learning_rate": 0.00010753268055279329,
"loss": 0.0001,
"step": 476
},
{
"epoch": 0.01706710556917187,
"grad_norm": 0.11902503669261932,
"learning_rate": 0.0001072193771882861,
"loss": 0.0015,
"step": 477
},
{
"epoch": 0.017102885664704724,
"grad_norm": 0.0015040796715766191,
"learning_rate": 0.00010690600257144061,
"loss": 0.0001,
"step": 478
},
{
"epoch": 0.01713866576023758,
"grad_norm": 0.022858543321490288,
"learning_rate": 0.0001065925597951378,
"loss": 0.0003,
"step": 479
},
{
"epoch": 0.017174445855770434,
"grad_norm": 0.0016762394225224853,
"learning_rate": 0.00010627905195293135,
"loss": 0.0001,
"step": 480
},
{
"epoch": 0.01721022595130329,
"grad_norm": 0.003237987868487835,
"learning_rate": 0.00010596548213901708,
"loss": 0.0001,
"step": 481
},
{
"epoch": 0.017246006046836144,
"grad_norm": 0.0022923192009329796,
"learning_rate": 0.00010565185344820247,
"loss": 0.0001,
"step": 482
},
{
"epoch": 0.017281786142369,
"grad_norm": 0.20378467440605164,
"learning_rate": 0.00010533816897587606,
"loss": 0.0047,
"step": 483
},
{
"epoch": 0.017317566237901854,
"grad_norm": 0.0037489025853574276,
"learning_rate": 0.00010502443181797697,
"loss": 0.0001,
"step": 484
},
{
"epoch": 0.01735334633343471,
"grad_norm": 0.001064359094016254,
"learning_rate": 0.00010471064507096426,
"loss": 0.0001,
"step": 485
},
{
"epoch": 0.017389126428967564,
"grad_norm": 0.000837884726934135,
"learning_rate": 0.0001043968118317865,
"loss": 0.0,
"step": 486
},
{
"epoch": 0.01742490652450042,
"grad_norm": 0.2281397432088852,
"learning_rate": 0.00010408293519785101,
"loss": 0.0009,
"step": 487
},
{
"epoch": 0.017460686620033274,
"grad_norm": 0.019389115273952484,
"learning_rate": 0.00010376901826699348,
"loss": 0.0003,
"step": 488
},
{
"epoch": 0.01749646671556613,
"grad_norm": 0.0031479692552238703,
"learning_rate": 0.00010345506413744726,
"loss": 0.0001,
"step": 489
},
{
"epoch": 0.017532246811098984,
"grad_norm": 0.004489561077207327,
"learning_rate": 0.00010314107590781284,
"loss": 0.0001,
"step": 490
},
{
"epoch": 0.01756802690663184,
"grad_norm": 0.005492003634572029,
"learning_rate": 0.00010282705667702734,
"loss": 0.0001,
"step": 491
},
{
"epoch": 0.017603807002164695,
"grad_norm": 0.0008158302516676486,
"learning_rate": 0.00010251300954433376,
"loss": 0.0001,
"step": 492
},
{
"epoch": 0.01763958709769755,
"grad_norm": 0.016751619055867195,
"learning_rate": 0.00010219893760925052,
"loss": 0.0002,
"step": 493
},
{
"epoch": 0.017675367193230405,
"grad_norm": 0.001021690433844924,
"learning_rate": 0.00010188484397154084,
"loss": 0.0001,
"step": 494
},
{
"epoch": 0.01771114728876326,
"grad_norm": 0.008309063501656055,
"learning_rate": 0.00010157073173118208,
"loss": 0.0003,
"step": 495
},
{
"epoch": 0.017746927384296115,
"grad_norm": 3.1554737091064453,
"learning_rate": 0.00010125660398833528,
"loss": 0.0277,
"step": 496
},
{
"epoch": 0.01778270747982897,
"grad_norm": 0.0003272468165960163,
"learning_rate": 0.00010094246384331442,
"loss": 0.0,
"step": 497
},
{
"epoch": 0.017818487575361825,
"grad_norm": 0.0007006549858488142,
"learning_rate": 0.00010062831439655591,
"loss": 0.0,
"step": 498
},
{
"epoch": 0.01785426767089468,
"grad_norm": 0.0005305926315486431,
"learning_rate": 0.00010031415874858797,
"loss": 0.0001,
"step": 499
},
{
"epoch": 0.017890047766427535,
"grad_norm": 0.0013227920280769467,
"learning_rate": 0.0001,
"loss": 0.0001,
"step": 500
},
{
"epoch": 0.01792582786196039,
"grad_norm": 0.001107820076867938,
"learning_rate": 9.968584125141204e-05,
"loss": 0.0001,
"step": 501
},
{
"epoch": 0.017961607957493245,
"grad_norm": 0.001051326747983694,
"learning_rate": 9.937168560344412e-05,
"loss": 0.0001,
"step": 502
},
{
"epoch": 0.0179973880530261,
"grad_norm": 0.0029785106889903545,
"learning_rate": 9.90575361566856e-05,
"loss": 0.0001,
"step": 503
},
{
"epoch": 0.018033168148558956,
"grad_norm": 0.0009764096466824412,
"learning_rate": 9.874339601166473e-05,
"loss": 0.0,
"step": 504
},
{
"epoch": 0.01806894824409181,
"grad_norm": 0.0004290563229005784,
"learning_rate": 9.842926826881796e-05,
"loss": 0.0,
"step": 505
},
{
"epoch": 0.018104728339624666,
"grad_norm": 0.0006004414171911776,
"learning_rate": 9.81151560284592e-05,
"loss": 0.0001,
"step": 506
},
{
"epoch": 0.01814050843515752,
"grad_norm": 0.0011966415913775563,
"learning_rate": 9.78010623907495e-05,
"loss": 0.0001,
"step": 507
},
{
"epoch": 0.018176288530690376,
"grad_norm": 0.0027027500327676535,
"learning_rate": 9.748699045566626e-05,
"loss": 0.0001,
"step": 508
},
{
"epoch": 0.01821206862622323,
"grad_norm": 0.008526821620762348,
"learning_rate": 9.717294332297268e-05,
"loss": 0.0002,
"step": 509
},
{
"epoch": 0.018247848721756086,
"grad_norm": 0.009162348695099354,
"learning_rate": 9.685892409218717e-05,
"loss": 0.0003,
"step": 510
},
{
"epoch": 0.01828362881728894,
"grad_norm": 0.14034569263458252,
"learning_rate": 9.654493586255278e-05,
"loss": 0.0003,
"step": 511
},
{
"epoch": 0.018319408912821796,
"grad_norm": 0.012582145631313324,
"learning_rate": 9.623098173300654e-05,
"loss": 0.0002,
"step": 512
},
{
"epoch": 0.01835518900835465,
"grad_norm": 0.0017335236771032214,
"learning_rate": 9.591706480214901e-05,
"loss": 0.0001,
"step": 513
},
{
"epoch": 0.018390969103887506,
"grad_norm": 0.0018851887434720993,
"learning_rate": 9.560318816821353e-05,
"loss": 0.0001,
"step": 514
},
{
"epoch": 0.01842674919942036,
"grad_norm": 0.003452875418588519,
"learning_rate": 9.528935492903575e-05,
"loss": 0.0001,
"step": 515
},
{
"epoch": 0.018462529294953216,
"grad_norm": 0.005577889736741781,
"learning_rate": 9.497556818202306e-05,
"loss": 0.0002,
"step": 516
},
{
"epoch": 0.01849830939048607,
"grad_norm": 0.047353558242321014,
"learning_rate": 9.466183102412395e-05,
"loss": 0.0008,
"step": 517
},
{
"epoch": 0.018534089486018927,
"grad_norm": 0.0012205071980133653,
"learning_rate": 9.434814655179755e-05,
"loss": 0.0001,
"step": 518
},
{
"epoch": 0.01856986958155178,
"grad_norm": 0.00221634516492486,
"learning_rate": 9.403451786098294e-05,
"loss": 0.0001,
"step": 519
},
{
"epoch": 0.018605649677084637,
"grad_norm": 0.0012431563809514046,
"learning_rate": 9.372094804706867e-05,
"loss": 0.0001,
"step": 520
},
{
"epoch": 0.018641429772617492,
"grad_norm": 0.011209934949874878,
"learning_rate": 9.340744020486222e-05,
"loss": 0.0002,
"step": 521
},
{
"epoch": 0.018677209868150347,
"grad_norm": 0.0011534786317497492,
"learning_rate": 9.309399742855942e-05,
"loss": 0.0001,
"step": 522
},
{
"epoch": 0.018712989963683202,
"grad_norm": 0.001877554226666689,
"learning_rate": 9.278062281171393e-05,
"loss": 0.0001,
"step": 523
},
{
"epoch": 0.018748770059216057,
"grad_norm": 0.0005466327420435846,
"learning_rate": 9.246731944720675e-05,
"loss": 0.0,
"step": 524
},
{
"epoch": 0.018784550154748912,
"grad_norm": 0.00111495575401932,
"learning_rate": 9.215409042721552e-05,
"loss": 0.0001,
"step": 525
},
{
"epoch": 0.018820330250281767,
"grad_norm": 0.0009859137935563922,
"learning_rate": 9.184093884318425e-05,
"loss": 0.0001,
"step": 526
},
{
"epoch": 0.018856110345814622,
"grad_norm": 0.0020015740301460028,
"learning_rate": 9.152786778579267e-05,
"loss": 0.0001,
"step": 527
},
{
"epoch": 0.018891890441347477,
"grad_norm": 0.14584803581237793,
"learning_rate": 9.121488034492569e-05,
"loss": 0.0011,
"step": 528
},
{
"epoch": 0.018927670536880332,
"grad_norm": 2.284058094024658,
"learning_rate": 9.090197960964301e-05,
"loss": 0.0192,
"step": 529
},
{
"epoch": 0.018963450632413188,
"grad_norm": 0.0032946874853223562,
"learning_rate": 9.058916866814858e-05,
"loss": 0.0001,
"step": 530
},
{
"epoch": 0.018999230727946043,
"grad_norm": 0.0009048773790709674,
"learning_rate": 9.027645060776006e-05,
"loss": 0.0001,
"step": 531
},
{
"epoch": 0.019035010823478898,
"grad_norm": 0.0011141895083710551,
"learning_rate": 8.99638285148785e-05,
"loss": 0.0001,
"step": 532
},
{
"epoch": 0.019070790919011753,
"grad_norm": 0.0008086450980044901,
"learning_rate": 8.965130547495776e-05,
"loss": 0.0001,
"step": 533
},
{
"epoch": 0.019106571014544608,
"grad_norm": 0.0009894418762996793,
"learning_rate": 8.933888457247402e-05,
"loss": 0.0001,
"step": 534
},
{
"epoch": 0.019142351110077463,
"grad_norm": 0.10656411945819855,
"learning_rate": 8.902656889089548e-05,
"loss": 0.0008,
"step": 535
},
{
"epoch": 0.019178131205610318,
"grad_norm": 0.00936040747910738,
"learning_rate": 8.871436151265184e-05,
"loss": 0.0002,
"step": 536
},
{
"epoch": 0.019213911301143173,
"grad_norm": 0.00042610568925738335,
"learning_rate": 8.840226551910387e-05,
"loss": 0.0,
"step": 537
},
{
"epoch": 0.019249691396676028,
"grad_norm": 0.1127459779381752,
"learning_rate": 8.809028399051302e-05,
"loss": 0.0015,
"step": 538
},
{
"epoch": 0.019285471492208883,
"grad_norm": 0.31594935059547424,
"learning_rate": 8.777842000601105e-05,
"loss": 0.0049,
"step": 539
},
{
"epoch": 0.01932125158774174,
"grad_norm": 0.0016649545868858695,
"learning_rate": 8.746667664356956e-05,
"loss": 0.0001,
"step": 540
},
{
"epoch": 0.019357031683274593,
"grad_norm": 0.01569303870201111,
"learning_rate": 8.715505697996971e-05,
"loss": 0.0002,
"step": 541
},
{
"epoch": 0.01939281177880745,
"grad_norm": 0.004535132087767124,
"learning_rate": 8.684356409077176e-05,
"loss": 0.0001,
"step": 542
},
{
"epoch": 0.019428591874340304,
"grad_norm": 0.013089085929095745,
"learning_rate": 8.653220105028474e-05,
"loss": 0.0003,
"step": 543
},
{
"epoch": 0.01946437196987316,
"grad_norm": 0.002690235385671258,
"learning_rate": 8.62209709315362e-05,
"loss": 0.0001,
"step": 544
},
{
"epoch": 0.019500152065406014,
"grad_norm": 0.00139359082095325,
"learning_rate": 8.590987680624174e-05,
"loss": 0.0001,
"step": 545
},
{
"epoch": 0.01953593216093887,
"grad_norm": 0.3601469099521637,
"learning_rate": 8.559892174477479e-05,
"loss": 0.0063,
"step": 546
},
{
"epoch": 0.019571712256471724,
"grad_norm": 0.0005300491466186941,
"learning_rate": 8.528810881613626e-05,
"loss": 0.0,
"step": 547
},
{
"epoch": 0.01960749235200458,
"grad_norm": 0.004550674930214882,
"learning_rate": 8.497744108792429e-05,
"loss": 0.0001,
"step": 548
},
{
"epoch": 0.019643272447537434,
"grad_norm": 0.21630121767520905,
"learning_rate": 8.466692162630392e-05,
"loss": 0.0014,
"step": 549
},
{
"epoch": 0.01967905254307029,
"grad_norm": 0.0005254748393781483,
"learning_rate": 8.435655349597689e-05,
"loss": 0.0001,
"step": 550
},
{
"epoch": 0.019714832638603144,
"grad_norm": 0.0007254169904626906,
"learning_rate": 8.404633976015134e-05,
"loss": 0.0001,
"step": 551
},
{
"epoch": 0.019750612734136,
"grad_norm": 1.2918907403945923,
"learning_rate": 8.373628348051165e-05,
"loss": 0.005,
"step": 552
},
{
"epoch": 0.019786392829668854,
"grad_norm": 0.0005183537723496556,
"learning_rate": 8.342638771718802e-05,
"loss": 0.0,
"step": 553
},
{
"epoch": 0.01982217292520171,
"grad_norm": 0.0008759180782362819,
"learning_rate": 8.311665552872662e-05,
"loss": 0.0001,
"step": 554
},
{
"epoch": 0.019857953020734564,
"grad_norm": 0.0011825045803561807,
"learning_rate": 8.280708997205904e-05,
"loss": 0.0,
"step": 555
},
{
"epoch": 0.01989373311626742,
"grad_norm": 0.0011933365603908896,
"learning_rate": 8.249769410247239e-05,
"loss": 0.0001,
"step": 556
},
{
"epoch": 0.019929513211800275,
"grad_norm": 0.0029679578728973866,
"learning_rate": 8.218847097357898e-05,
"loss": 0.0002,
"step": 557
},
{
"epoch": 0.01996529330733313,
"grad_norm": 0.002634399337694049,
"learning_rate": 8.187942363728625e-05,
"loss": 0.0001,
"step": 558
},
{
"epoch": 0.020001073402865985,
"grad_norm": 0.0009519772138446569,
"learning_rate": 8.157055514376666e-05,
"loss": 0.0001,
"step": 559
},
{
"epoch": 0.02003685349839884,
"grad_norm": 0.012963998131453991,
"learning_rate": 8.126186854142752e-05,
"loss": 0.0001,
"step": 560
},
{
"epoch": 0.020072633593931695,
"grad_norm": 0.00807272456586361,
"learning_rate": 8.095336687688102e-05,
"loss": 0.0001,
"step": 561
},
{
"epoch": 0.02010841368946455,
"grad_norm": 0.001012706314213574,
"learning_rate": 8.064505319491398e-05,
"loss": 0.0001,
"step": 562
},
{
"epoch": 0.020144193784997405,
"grad_norm": 0.004604503512382507,
"learning_rate": 8.033693053845801e-05,
"loss": 0.0001,
"step": 563
},
{
"epoch": 0.02017997388053026,
"grad_norm": 0.000794455932918936,
"learning_rate": 8.002900194855932e-05,
"loss": 0.0001,
"step": 564
},
{
"epoch": 0.020215753976063115,
"grad_norm": 0.0009159276960417628,
"learning_rate": 7.972127046434878e-05,
"loss": 0.0,
"step": 565
},
{
"epoch": 0.02025153407159597,
"grad_norm": 0.004088504705578089,
"learning_rate": 7.941373912301189e-05,
"loss": 0.0002,
"step": 566
},
{
"epoch": 0.020287314167128825,
"grad_norm": 0.0018173141870647669,
"learning_rate": 7.910641095975886e-05,
"loss": 0.0001,
"step": 567
},
{
"epoch": 0.02032309426266168,
"grad_norm": 0.00044192952918820083,
"learning_rate": 7.879928900779456e-05,
"loss": 0.0,
"step": 568
},
{
"epoch": 0.020358874358194536,
"grad_norm": 0.0005358615890145302,
"learning_rate": 7.849237629828869e-05,
"loss": 0.0001,
"step": 569
},
{
"epoch": 0.02039465445372739,
"grad_norm": 0.024256188422441483,
"learning_rate": 7.818567586034577e-05,
"loss": 0.0002,
"step": 570
},
{
"epoch": 0.020430434549260246,
"grad_norm": 0.0016058129258453846,
"learning_rate": 7.787919072097531e-05,
"loss": 0.0001,
"step": 571
},
{
"epoch": 0.0204662146447931,
"grad_norm": 0.00042367077548988163,
"learning_rate": 7.75729239050619e-05,
"loss": 0.0,
"step": 572
},
{
"epoch": 0.020501994740325956,
"grad_norm": 0.0005126690957695246,
"learning_rate": 7.726687843533538e-05,
"loss": 0.0,
"step": 573
},
{
"epoch": 0.02053777483585881,
"grad_norm": 0.0016349004581570625,
"learning_rate": 7.696105733234098e-05,
"loss": 0.0001,
"step": 574
},
{
"epoch": 0.020573554931391666,
"grad_norm": 0.0010834066197276115,
"learning_rate": 7.66554636144095e-05,
"loss": 0.0001,
"step": 575
},
{
"epoch": 0.02060933502692452,
"grad_norm": 0.001176648074761033,
"learning_rate": 7.635010029762756e-05,
"loss": 0.0001,
"step": 576
},
{
"epoch": 0.020645115122457376,
"grad_norm": 0.0007962558884173632,
"learning_rate": 7.604497039580785e-05,
"loss": 0.0001,
"step": 577
},
{
"epoch": 0.02068089521799023,
"grad_norm": 0.0017486009746789932,
"learning_rate": 7.574007692045928e-05,
"loss": 0.0001,
"step": 578
},
{
"epoch": 0.020716675313523086,
"grad_norm": 0.0006156065501272678,
"learning_rate": 7.543542288075739e-05,
"loss": 0.0001,
"step": 579
},
{
"epoch": 0.02075245540905594,
"grad_norm": 0.0005485773435793817,
"learning_rate": 7.513101128351454e-05,
"loss": 0.0001,
"step": 580
},
{
"epoch": 0.020788235504588796,
"grad_norm": 0.0023702112957835197,
"learning_rate": 7.48268451331503e-05,
"loss": 0.0001,
"step": 581
},
{
"epoch": 0.02082401560012165,
"grad_norm": 0.0008253026171587408,
"learning_rate": 7.45229274316618e-05,
"loss": 0.0001,
"step": 582
},
{
"epoch": 0.020859795695654507,
"grad_norm": 0.0005967675824649632,
"learning_rate": 7.421926117859403e-05,
"loss": 0.0,
"step": 583
},
{
"epoch": 0.02089557579118736,
"grad_norm": 0.0028061717748641968,
"learning_rate": 7.391584937101033e-05,
"loss": 0.0001,
"step": 584
},
{
"epoch": 0.020931355886720217,
"grad_norm": 0.00168716034386307,
"learning_rate": 7.361269500346274e-05,
"loss": 0.0001,
"step": 585
},
{
"epoch": 0.020967135982253072,
"grad_norm": 0.0015768698649480939,
"learning_rate": 7.330980106796246e-05,
"loss": 0.0001,
"step": 586
},
{
"epoch": 0.021002916077785927,
"grad_norm": 0.001126199378632009,
"learning_rate": 7.300717055395039e-05,
"loss": 0.0001,
"step": 587
},
{
"epoch": 0.021038696173318782,
"grad_norm": 0.001138276536948979,
"learning_rate": 7.270480644826749e-05,
"loss": 0.0001,
"step": 588
},
{
"epoch": 0.021074476268851637,
"grad_norm": 0.002184976125136018,
"learning_rate": 7.240271173512546e-05,
"loss": 0.0001,
"step": 589
},
{
"epoch": 0.021110256364384492,
"grad_norm": 0.005373840685933828,
"learning_rate": 7.210088939607708e-05,
"loss": 0.0001,
"step": 590
},
{
"epoch": 0.021146036459917347,
"grad_norm": 0.021403763443231583,
"learning_rate": 7.179934240998706e-05,
"loss": 0.0003,
"step": 591
},
{
"epoch": 0.021181816555450202,
"grad_norm": 0.0008738846518099308,
"learning_rate": 7.149807375300239e-05,
"loss": 0.0001,
"step": 592
},
{
"epoch": 0.021217596650983057,
"grad_norm": 0.01297273300588131,
"learning_rate": 7.119708639852312e-05,
"loss": 0.0001,
"step": 593
},
{
"epoch": 0.021253376746515912,
"grad_norm": 0.0009190509445033967,
"learning_rate": 7.089638331717284e-05,
"loss": 0.0001,
"step": 594
},
{
"epoch": 0.021289156842048768,
"grad_norm": 0.0008834196487441659,
"learning_rate": 7.059596747676962e-05,
"loss": 0.0001,
"step": 595
},
{
"epoch": 0.021324936937581623,
"grad_norm": 0.0009299754165112972,
"learning_rate": 7.029584184229653e-05,
"loss": 0.0001,
"step": 596
},
{
"epoch": 0.021360717033114478,
"grad_norm": 0.0009522477630525827,
"learning_rate": 6.999600937587239e-05,
"loss": 0.0001,
"step": 597
},
{
"epoch": 0.021396497128647333,
"grad_norm": 0.001419812091626227,
"learning_rate": 6.969647303672262e-05,
"loss": 0.0001,
"step": 598
},
{
"epoch": 0.021432277224180188,
"grad_norm": 0.014936032705008984,
"learning_rate": 6.939723578114993e-05,
"loss": 0.0003,
"step": 599
},
{
"epoch": 0.021468057319713043,
"grad_norm": 0.0007730189245194197,
"learning_rate": 6.909830056250527e-05,
"loss": 0.0,
"step": 600
},
{
"epoch": 0.021503837415245898,
"grad_norm": 0.0008386225090362132,
"learning_rate": 6.879967033115853e-05,
"loss": 0.0001,
"step": 601
},
{
"epoch": 0.021539617510778753,
"grad_norm": 0.0006314528291113675,
"learning_rate": 6.850134803446954e-05,
"loss": 0.0001,
"step": 602
},
{
"epoch": 0.021575397606311608,
"grad_norm": 0.004665585700422525,
"learning_rate": 6.820333661675893e-05,
"loss": 0.0002,
"step": 603
},
{
"epoch": 0.021611177701844463,
"grad_norm": 0.0008167759515345097,
"learning_rate": 6.790563901927907e-05,
"loss": 0.0001,
"step": 604
},
{
"epoch": 0.02164695779737732,
"grad_norm": 0.0006439369171857834,
"learning_rate": 6.760825818018508e-05,
"loss": 0.0001,
"step": 605
},
{
"epoch": 0.021682737892910173,
"grad_norm": 0.0010109387803822756,
"learning_rate": 6.731119703450577e-05,
"loss": 0.0001,
"step": 606
},
{
"epoch": 0.02171851798844303,
"grad_norm": 0.00030730641447007656,
"learning_rate": 6.701445851411472e-05,
"loss": 0.0,
"step": 607
},
{
"epoch": 0.021754298083975884,
"grad_norm": 0.003990877885371447,
"learning_rate": 6.671804554770135e-05,
"loss": 0.0002,
"step": 608
},
{
"epoch": 0.02179007817950874,
"grad_norm": 0.00034466804936528206,
"learning_rate": 6.642196106074194e-05,
"loss": 0.0,
"step": 609
},
{
"epoch": 0.021825858275041594,
"grad_norm": 0.001726245740428567,
"learning_rate": 6.612620797547087e-05,
"loss": 0.0001,
"step": 610
},
{
"epoch": 0.02186163837057445,
"grad_norm": 0.001501871389336884,
"learning_rate": 6.583078921085167e-05,
"loss": 0.0001,
"step": 611
},
{
"epoch": 0.021897418466107304,
"grad_norm": 0.0008260820177383721,
"learning_rate": 6.55357076825483e-05,
"loss": 0.0001,
"step": 612
},
{
"epoch": 0.02193319856164016,
"grad_norm": 0.01785612665116787,
"learning_rate": 6.52409663028963e-05,
"loss": 0.0002,
"step": 613
},
{
"epoch": 0.021968978657173014,
"grad_norm": 0.002857221057638526,
"learning_rate": 6.494656798087412e-05,
"loss": 0.0,
"step": 614
},
{
"epoch": 0.02200475875270587,
"grad_norm": 0.0006424558814615011,
"learning_rate": 6.465251562207431e-05,
"loss": 0.0,
"step": 615
},
{
"epoch": 0.022040538848238724,
"grad_norm": 0.0006789982435293496,
"learning_rate": 6.435881212867493e-05,
"loss": 0.0001,
"step": 616
},
{
"epoch": 0.02207631894377158,
"grad_norm": 0.0005097879911772907,
"learning_rate": 6.406546039941094e-05,
"loss": 0.0,
"step": 617
},
{
"epoch": 0.022112099039304434,
"grad_norm": 0.0009265893022529781,
"learning_rate": 6.377246332954544e-05,
"loss": 0.0001,
"step": 618
},
{
"epoch": 0.02214787913483729,
"grad_norm": 0.0006062160828150809,
"learning_rate": 6.347982381084123e-05,
"loss": 0.0001,
"step": 619
},
{
"epoch": 0.022183659230370145,
"grad_norm": 0.0005328463157638907,
"learning_rate": 6.318754473153221e-05,
"loss": 0.0001,
"step": 620
},
{
"epoch": 0.022219439325903,
"grad_norm": 0.00047331457608379424,
"learning_rate": 6.289562897629492e-05,
"loss": 0.0,
"step": 621
},
{
"epoch": 0.022255219421435855,
"grad_norm": 0.08468073606491089,
"learning_rate": 6.260407942621998e-05,
"loss": 0.0005,
"step": 622
},
{
"epoch": 0.02229099951696871,
"grad_norm": 0.000396040384657681,
"learning_rate": 6.231289895878375e-05,
"loss": 0.0,
"step": 623
},
{
"epoch": 0.022326779612501565,
"grad_norm": 0.000619475613348186,
"learning_rate": 6.20220904478199e-05,
"loss": 0.0001,
"step": 624
},
{
"epoch": 0.02236255970803442,
"grad_norm": 0.006687821354717016,
"learning_rate": 6.173165676349103e-05,
"loss": 0.0001,
"step": 625
},
{
"epoch": 0.022398339803567275,
"grad_norm": 0.00044290735968388617,
"learning_rate": 6.144160077226036e-05,
"loss": 0.0001,
"step": 626
},
{
"epoch": 0.02243411989910013,
"grad_norm": 0.009390274994075298,
"learning_rate": 6.11519253368634e-05,
"loss": 0.0001,
"step": 627
},
{
"epoch": 0.022469899994632985,
"grad_norm": 0.0016717095859348774,
"learning_rate": 6.086263331627976e-05,
"loss": 0.0001,
"step": 628
},
{
"epoch": 0.02250568009016584,
"grad_norm": 0.00042551616206765175,
"learning_rate": 6.05737275657049e-05,
"loss": 0.0,
"step": 629
},
{
"epoch": 0.022541460185698695,
"grad_norm": 1.9672138690948486,
"learning_rate": 6.0285210936521955e-05,
"loss": 0.0221,
"step": 630
},
{
"epoch": 0.02257724028123155,
"grad_norm": 0.0004953712341375649,
"learning_rate": 5.999708627627354e-05,
"loss": 0.0,
"step": 631
},
{
"epoch": 0.022613020376764405,
"grad_norm": 0.0006604749942198396,
"learning_rate": 5.9709356428633746e-05,
"loss": 0.0001,
"step": 632
},
{
"epoch": 0.02264880047229726,
"grad_norm": 0.0006702704122290015,
"learning_rate": 5.9422024233380013e-05,
"loss": 0.0001,
"step": 633
},
{
"epoch": 0.022684580567830116,
"grad_norm": 0.0017094590002670884,
"learning_rate": 5.913509252636511e-05,
"loss": 0.0,
"step": 634
},
{
"epoch": 0.02272036066336297,
"grad_norm": 0.000579804356675595,
"learning_rate": 5.884856413948913e-05,
"loss": 0.0,
"step": 635
},
{
"epoch": 0.022756140758895826,
"grad_norm": 0.0003796029486693442,
"learning_rate": 5.856244190067159e-05,
"loss": 0.0,
"step": 636
},
{
"epoch": 0.02279192085442868,
"grad_norm": 0.0005215677083469927,
"learning_rate": 5.82767286338235e-05,
"loss": 0.0,
"step": 637
},
{
"epoch": 0.022827700949961536,
"grad_norm": 0.041516367346048355,
"learning_rate": 5.799142715881938e-05,
"loss": 0.0006,
"step": 638
},
{
"epoch": 0.02286348104549439,
"grad_norm": 0.0007705031312070787,
"learning_rate": 5.770654029146969e-05,
"loss": 0.0,
"step": 639
},
{
"epoch": 0.022899261141027246,
"grad_norm": 0.0017117122188210487,
"learning_rate": 5.7422070843492734e-05,
"loss": 0.0001,
"step": 640
},
{
"epoch": 0.0229350412365601,
"grad_norm": 0.0010502267396077514,
"learning_rate": 5.713802162248718e-05,
"loss": 0.0,
"step": 641
},
{
"epoch": 0.022970821332092956,
"grad_norm": 0.000529443146660924,
"learning_rate": 5.6854395431904094e-05,
"loss": 0.0,
"step": 642
},
{
"epoch": 0.02300660142762581,
"grad_norm": 0.0006939378217794001,
"learning_rate": 5.657119507101954e-05,
"loss": 0.0,
"step": 643
},
{
"epoch": 0.023042381523158666,
"grad_norm": 0.00042171860695816576,
"learning_rate": 5.6288423334906735e-05,
"loss": 0.0,
"step": 644
},
{
"epoch": 0.02307816161869152,
"grad_norm": 0.000523390423040837,
"learning_rate": 5.6006083014408484e-05,
"loss": 0.0,
"step": 645
},
{
"epoch": 0.023113941714224377,
"grad_norm": 0.000571369775570929,
"learning_rate": 5.572417689610987e-05,
"loss": 0.0,
"step": 646
},
{
"epoch": 0.02314972180975723,
"grad_norm": 0.0006714938790537417,
"learning_rate": 5.544270776231038e-05,
"loss": 0.0001,
"step": 647
},
{
"epoch": 0.023185501905290087,
"grad_norm": 0.0005017356015741825,
"learning_rate": 5.5161678390996796e-05,
"loss": 0.0,
"step": 648
},
{
"epoch": 0.023221282000822942,
"grad_norm": 0.0011646583443507552,
"learning_rate": 5.488109155581549e-05,
"loss": 0.0001,
"step": 649
},
{
"epoch": 0.023257062096355797,
"grad_norm": 0.00028895260766148567,
"learning_rate": 5.4600950026045326e-05,
"loss": 0.0,
"step": 650
},
{
"epoch": 0.023292842191888652,
"grad_norm": 0.0008328702533617616,
"learning_rate": 5.4321256566570036e-05,
"loss": 0.0001,
"step": 651
},
{
"epoch": 0.023328622287421507,
"grad_norm": 0.00439800601452589,
"learning_rate": 5.404201393785122e-05,
"loss": 0.0,
"step": 652
},
{
"epoch": 0.023364402382954362,
"grad_norm": 0.002686363412067294,
"learning_rate": 5.3763224895900846e-05,
"loss": 0.0001,
"step": 653
},
{
"epoch": 0.023400182478487217,
"grad_norm": 0.0007387395016849041,
"learning_rate": 5.348489219225416e-05,
"loss": 0.0001,
"step": 654
},
{
"epoch": 0.023435962574020072,
"grad_norm": 0.006214470602571964,
"learning_rate": 5.320701857394268e-05,
"loss": 0.0002,
"step": 655
},
{
"epoch": 0.023471742669552927,
"grad_norm": 0.000319985905662179,
"learning_rate": 5.292960678346675e-05,
"loss": 0.0,
"step": 656
},
{
"epoch": 0.023507522765085782,
"grad_norm": 0.0005882193217985332,
"learning_rate": 5.265265955876879e-05,
"loss": 0.0001,
"step": 657
},
{
"epoch": 0.023543302860618637,
"grad_norm": 0.0004243079456500709,
"learning_rate": 5.237617963320608e-05,
"loss": 0.0,
"step": 658
},
{
"epoch": 0.023579082956151493,
"grad_norm": 0.0003599765768740326,
"learning_rate": 5.210016973552391e-05,
"loss": 0.0,
"step": 659
},
{
"epoch": 0.023614863051684348,
"grad_norm": 0.0007886773091740906,
"learning_rate": 5.182463258982846e-05,
"loss": 0.0,
"step": 660
},
{
"epoch": 0.023650643147217203,
"grad_norm": 0.00048393604811280966,
"learning_rate": 5.1549570915560206e-05,
"loss": 0.0,
"step": 661
},
{
"epoch": 0.023686423242750058,
"grad_norm": 0.248721644282341,
"learning_rate": 5.127498742746675e-05,
"loss": 0.0029,
"step": 662
},
{
"epoch": 0.023722203338282913,
"grad_norm": 0.001305613899603486,
"learning_rate": 5.100088483557634e-05,
"loss": 0.0001,
"step": 663
},
{
"epoch": 0.023757983433815768,
"grad_norm": 0.0008696125587448478,
"learning_rate": 5.072726584517086e-05,
"loss": 0.0001,
"step": 664
},
{
"epoch": 0.023793763529348623,
"grad_norm": 0.0011506453156471252,
"learning_rate": 5.045413315675924e-05,
"loss": 0.0001,
"step": 665
},
{
"epoch": 0.023829543624881478,
"grad_norm": 0.0009921109303832054,
"learning_rate": 5.018148946605092e-05,
"loss": 0.0001,
"step": 666
},
{
"epoch": 0.023865323720414333,
"grad_norm": 0.0002917166566476226,
"learning_rate": 4.990933746392899e-05,
"loss": 0.0,
"step": 667
},
{
"epoch": 0.023901103815947188,
"grad_norm": 0.0019778795540332794,
"learning_rate": 4.9637679836423924e-05,
"loss": 0.0,
"step": 668
},
{
"epoch": 0.023936883911480043,
"grad_norm": 0.16058020293712616,
"learning_rate": 4.9366519264686725e-05,
"loss": 0.0007,
"step": 669
},
{
"epoch": 0.0239726640070129,
"grad_norm": 0.000699148396961391,
"learning_rate": 4.909585842496287e-05,
"loss": 0.0,
"step": 670
},
{
"epoch": 0.024008444102545753,
"grad_norm": 0.000457806047052145,
"learning_rate": 4.8825699988565485e-05,
"loss": 0.0,
"step": 671
},
{
"epoch": 0.02404422419807861,
"grad_norm": 0.0014318109024316072,
"learning_rate": 4.8556046621849346e-05,
"loss": 0.0001,
"step": 672
},
{
"epoch": 0.024080004293611464,
"grad_norm": 0.0009062907774932683,
"learning_rate": 4.828690098618429e-05,
"loss": 0.0,
"step": 673
},
{
"epoch": 0.02411578438914432,
"grad_norm": 0.0007847001543268561,
"learning_rate": 4.8018265737929044e-05,
"loss": 0.0001,
"step": 674
},
{
"epoch": 0.024151564484677174,
"grad_norm": 0.0003682690439745784,
"learning_rate": 4.7750143528405126e-05,
"loss": 0.0,
"step": 675
},
{
"epoch": 0.02418734458021003,
"grad_norm": 0.0014180666767060757,
"learning_rate": 4.748253700387042e-05,
"loss": 0.0001,
"step": 676
},
{
"epoch": 0.024223124675742884,
"grad_norm": 0.0004101350496057421,
"learning_rate": 4.721544880549337e-05,
"loss": 0.0001,
"step": 677
},
{
"epoch": 0.02425890477127574,
"grad_norm": 0.04580630362033844,
"learning_rate": 4.694888156932658e-05,
"loss": 0.0002,
"step": 678
},
{
"epoch": 0.024294684866808594,
"grad_norm": 0.0020803629886358976,
"learning_rate": 4.668283792628114e-05,
"loss": 0.0001,
"step": 679
},
{
"epoch": 0.02433046496234145,
"grad_norm": 0.0007991611491888762,
"learning_rate": 4.6417320502100316e-05,
"loss": 0.0001,
"step": 680
},
{
"epoch": 0.024366245057874304,
"grad_norm": 0.0005774202290922403,
"learning_rate": 4.615233191733398e-05,
"loss": 0.0001,
"step": 681
},
{
"epoch": 0.02440202515340716,
"grad_norm": 0.00040934517164714634,
"learning_rate": 4.588787478731242e-05,
"loss": 0.0,
"step": 682
},
{
"epoch": 0.024437805248940014,
"grad_norm": 0.004855821840465069,
"learning_rate": 4.5623951722120736e-05,
"loss": 0.0001,
"step": 683
},
{
"epoch": 0.02447358534447287,
"grad_norm": 0.022048255428671837,
"learning_rate": 4.5360565326573104e-05,
"loss": 0.0005,
"step": 684
},
{
"epoch": 0.024509365440005725,
"grad_norm": 0.0009525329223833978,
"learning_rate": 4.5097718200186814e-05,
"loss": 0.0001,
"step": 685
},
{
"epoch": 0.02454514553553858,
"grad_norm": 0.015187989920377731,
"learning_rate": 4.483541293715698e-05,
"loss": 0.0001,
"step": 686
},
{
"epoch": 0.024580925631071435,
"grad_norm": 0.0002924858999904245,
"learning_rate": 4.457365212633058e-05,
"loss": 0.0,
"step": 687
},
{
"epoch": 0.02461670572660429,
"grad_norm": 0.0030987297650426626,
"learning_rate": 4.431243835118124e-05,
"loss": 0.0001,
"step": 688
},
{
"epoch": 0.024652485822137145,
"grad_norm": 0.07195527851581573,
"learning_rate": 4.4051774189783315e-05,
"loss": 0.0004,
"step": 689
},
{
"epoch": 0.02468826591767,
"grad_norm": 0.00034498111926950514,
"learning_rate": 4.379166221478697e-05,
"loss": 0.0,
"step": 690
},
{
"epoch": 0.024724046013202855,
"grad_norm": 0.0004273645463399589,
"learning_rate": 4.3532104993392306e-05,
"loss": 0.0,
"step": 691
},
{
"epoch": 0.02475982610873571,
"grad_norm": 0.0006148627726361156,
"learning_rate": 4.327310508732437e-05,
"loss": 0.0,
"step": 692
},
{
"epoch": 0.024795606204268565,
"grad_norm": 0.0018783046398311853,
"learning_rate": 4.301466505280762e-05,
"loss": 0.0001,
"step": 693
},
{
"epoch": 0.02483138629980142,
"grad_norm": 0.0005436090286821127,
"learning_rate": 4.2756787440540936e-05,
"loss": 0.0,
"step": 694
},
{
"epoch": 0.024867166395334275,
"grad_norm": 0.0003239592188037932,
"learning_rate": 4.249947479567218e-05,
"loss": 0.0,
"step": 695
},
{
"epoch": 0.02490294649086713,
"grad_norm": 0.00047223715228028595,
"learning_rate": 4.224272965777326e-05,
"loss": 0.0,
"step": 696
},
{
"epoch": 0.024938726586399985,
"grad_norm": 0.005179517436772585,
"learning_rate": 4.1986554560815096e-05,
"loss": 0.0001,
"step": 697
},
{
"epoch": 0.02497450668193284,
"grad_norm": 0.001426901202648878,
"learning_rate": 4.173095203314241e-05,
"loss": 0.0001,
"step": 698
},
{
"epoch": 0.025010286777465696,
"grad_norm": 0.0039044171571731567,
"learning_rate": 4.1475924597449024e-05,
"loss": 0.0001,
"step": 699
},
{
"epoch": 0.02504606687299855,
"grad_norm": 0.000497961591463536,
"learning_rate": 4.12214747707527e-05,
"loss": 0.0,
"step": 700
},
{
"epoch": 0.025081846968531406,
"grad_norm": 0.0007646341691724956,
"learning_rate": 4.096760506437057e-05,
"loss": 0.0,
"step": 701
},
{
"epoch": 0.02511762706406426,
"grad_norm": 0.0016085326205939054,
"learning_rate": 4.071431798389408e-05,
"loss": 0.0,
"step": 702
},
{
"epoch": 0.025153407159597116,
"grad_norm": 0.0005108435289002955,
"learning_rate": 4.0461616029164526e-05,
"loss": 0.0,
"step": 703
},
{
"epoch": 0.02518918725512997,
"grad_norm": 0.0007308077183552086,
"learning_rate": 4.020950169424815e-05,
"loss": 0.0,
"step": 704
},
{
"epoch": 0.025224967350662826,
"grad_norm": 0.017377054318785667,
"learning_rate": 3.9957977467411615e-05,
"loss": 0.0001,
"step": 705
},
{
"epoch": 0.02526074744619568,
"grad_norm": 0.00024152210971806198,
"learning_rate": 3.9707045831097555e-05,
"loss": 0.0,
"step": 706
},
{
"epoch": 0.025296527541728536,
"grad_norm": 0.003006350016221404,
"learning_rate": 3.945670926189987e-05,
"loss": 0.0001,
"step": 707
},
{
"epoch": 0.02533230763726139,
"grad_norm": 0.00035428308183327317,
"learning_rate": 3.920697023053949e-05,
"loss": 0.0,
"step": 708
},
{
"epoch": 0.025368087732794246,
"grad_norm": 0.002442484488710761,
"learning_rate": 3.895783120183976e-05,
"loss": 0.0001,
"step": 709
},
{
"epoch": 0.0254038678283271,
"grad_norm": 0.0002176981361117214,
"learning_rate": 3.8709294634702376e-05,
"loss": 0.0,
"step": 710
},
{
"epoch": 0.025439647923859957,
"grad_norm": 0.000730315106920898,
"learning_rate": 3.846136298208285e-05,
"loss": 0.0,
"step": 711
},
{
"epoch": 0.02547542801939281,
"grad_norm": 0.0019350670045241714,
"learning_rate": 3.821403869096658e-05,
"loss": 0.0001,
"step": 712
},
{
"epoch": 0.025511208114925667,
"grad_norm": 0.001800208236090839,
"learning_rate": 3.796732420234443e-05,
"loss": 0.0001,
"step": 713
},
{
"epoch": 0.025546988210458522,
"grad_norm": 0.00022908074606675655,
"learning_rate": 3.7721221951188765e-05,
"loss": 0.0,
"step": 714
},
{
"epoch": 0.025582768305991377,
"grad_norm": 0.0016731393989175558,
"learning_rate": 3.747573436642951e-05,
"loss": 0.0001,
"step": 715
},
{
"epoch": 0.025618548401524232,
"grad_norm": 0.00038809413672424853,
"learning_rate": 3.7230863870929964e-05,
"loss": 0.0,
"step": 716
},
{
"epoch": 0.025654328497057087,
"grad_norm": 0.003590758191421628,
"learning_rate": 3.698661288146311e-05,
"loss": 0.0001,
"step": 717
},
{
"epoch": 0.025690108592589942,
"grad_norm": 0.0006934832199476659,
"learning_rate": 3.674298380868756e-05,
"loss": 0.0,
"step": 718
},
{
"epoch": 0.025725888688122797,
"grad_norm": 0.0002972781367134303,
"learning_rate": 3.649997905712396e-05,
"loss": 0.0,
"step": 719
},
{
"epoch": 0.025761668783655652,
"grad_norm": 0.00044423117651604116,
"learning_rate": 3.6257601025131026e-05,
"loss": 0.0,
"step": 720
},
{
"epoch": 0.025797448879188507,
"grad_norm": 0.0009410815546289086,
"learning_rate": 3.601585210488218e-05,
"loss": 0.0,
"step": 721
},
{
"epoch": 0.025833228974721362,
"grad_norm": 0.0018991850083693862,
"learning_rate": 3.577473468234156e-05,
"loss": 0.0001,
"step": 722
},
{
"epoch": 0.025869009070254217,
"grad_norm": 0.0007036121096462011,
"learning_rate": 3.553425113724088e-05,
"loss": 0.0,
"step": 723
},
{
"epoch": 0.025904789165787073,
"grad_norm": 0.036000337451696396,
"learning_rate": 3.52944038430556e-05,
"loss": 0.0001,
"step": 724
},
{
"epoch": 0.025940569261319928,
"grad_norm": 0.0010548071004450321,
"learning_rate": 3.5055195166981645e-05,
"loss": 0.0001,
"step": 725
},
{
"epoch": 0.025976349356852783,
"grad_norm": 0.0002591910306364298,
"learning_rate": 3.481662746991214e-05,
"loss": 0.0,
"step": 726
},
{
"epoch": 0.026012129452385638,
"grad_norm": 0.014273213222622871,
"learning_rate": 3.4578703106413904e-05,
"loss": 0.0002,
"step": 727
},
{
"epoch": 0.026047909547918493,
"grad_norm": 0.00023336062440648675,
"learning_rate": 3.4341424424704375e-05,
"loss": 0.0,
"step": 728
},
{
"epoch": 0.026083689643451348,
"grad_norm": 0.028344599530100822,
"learning_rate": 3.4104793766628304e-05,
"loss": 0.0002,
"step": 729
},
{
"epoch": 0.026119469738984203,
"grad_norm": 0.0005131230573169887,
"learning_rate": 3.386881346763483e-05,
"loss": 0.0,
"step": 730
},
{
"epoch": 0.026155249834517058,
"grad_norm": 0.0010172853944823146,
"learning_rate": 3.363348585675414e-05,
"loss": 0.0001,
"step": 731
},
{
"epoch": 0.026191029930049913,
"grad_norm": 0.0003787051828112453,
"learning_rate": 3.339881325657484e-05,
"loss": 0.0,
"step": 732
},
{
"epoch": 0.026226810025582768,
"grad_norm": 0.0009722478571347892,
"learning_rate": 3.316479798322072e-05,
"loss": 0.0001,
"step": 733
},
{
"epoch": 0.026262590121115623,
"grad_norm": 0.025112910196185112,
"learning_rate": 3.2931442346328004e-05,
"loss": 0.0004,
"step": 734
},
{
"epoch": 0.02629837021664848,
"grad_norm": 0.002964810933917761,
"learning_rate": 3.269874864902269e-05,
"loss": 0.0001,
"step": 735
},
{
"epoch": 0.026334150312181333,
"grad_norm": 0.00032617044053040445,
"learning_rate": 3.246671918789755e-05,
"loss": 0.0,
"step": 736
},
{
"epoch": 0.02636993040771419,
"grad_norm": 0.005642542615532875,
"learning_rate": 3.223535625298979e-05,
"loss": 0.0001,
"step": 737
},
{
"epoch": 0.026405710503247044,
"grad_norm": 0.0002995952090714127,
"learning_rate": 3.200466212775808e-05,
"loss": 0.0,
"step": 738
},
{
"epoch": 0.0264414905987799,
"grad_norm": 0.00034134986344724894,
"learning_rate": 3.1774639089060363e-05,
"loss": 0.0,
"step": 739
},
{
"epoch": 0.026477270694312754,
"grad_norm": 0.0021360015962272882,
"learning_rate": 3.154528940713113e-05,
"loss": 0.0001,
"step": 740
},
{
"epoch": 0.02651305078984561,
"grad_norm": 0.0003997268504463136,
"learning_rate": 3.1316615345559185e-05,
"loss": 0.0,
"step": 741
},
{
"epoch": 0.026548830885378464,
"grad_norm": 0.7127798795700073,
"learning_rate": 3.108861916126518e-05,
"loss": 0.0027,
"step": 742
},
{
"epoch": 0.02658461098091132,
"grad_norm": 0.0017480599926784635,
"learning_rate": 3.086130310447937e-05,
"loss": 0.0001,
"step": 743
},
{
"epoch": 0.026620391076444174,
"grad_norm": 0.0009076651185750961,
"learning_rate": 3.063466941871952e-05,
"loss": 0.0001,
"step": 744
},
{
"epoch": 0.02665617117197703,
"grad_norm": 0.002970295725390315,
"learning_rate": 3.0408720340768572e-05,
"loss": 0.0001,
"step": 745
},
{
"epoch": 0.026691951267509884,
"grad_norm": 0.00616664020344615,
"learning_rate": 3.018345810065275e-05,
"loss": 0.0001,
"step": 746
},
{
"epoch": 0.02672773136304274,
"grad_norm": 0.0025385096669197083,
"learning_rate": 2.9958884921619367e-05,
"loss": 0.0001,
"step": 747
},
{
"epoch": 0.026763511458575594,
"grad_norm": 0.0010526994010433555,
"learning_rate": 2.9735003020115092e-05,
"loss": 0.0,
"step": 748
},
{
"epoch": 0.02679929155410845,
"grad_norm": 0.0003839809214696288,
"learning_rate": 2.9511814605763855e-05,
"loss": 0.0,
"step": 749
},
{
"epoch": 0.026835071649641305,
"grad_norm": 0.001031058025546372,
"learning_rate": 2.9289321881345254e-05,
"loss": 0.0001,
"step": 750
},
{
"epoch": 0.02687085174517416,
"grad_norm": 0.000416404043789953,
"learning_rate": 2.9067527042772636e-05,
"loss": 0.0,
"step": 751
},
{
"epoch": 0.026906631840707015,
"grad_norm": 0.0006090101669542491,
"learning_rate": 2.8846432279071467e-05,
"loss": 0.0,
"step": 752
},
{
"epoch": 0.02694241193623987,
"grad_norm": 0.0008759150514379144,
"learning_rate": 2.8626039772357882e-05,
"loss": 0.0,
"step": 753
},
{
"epoch": 0.026978192031772725,
"grad_norm": 0.006808442063629627,
"learning_rate": 2.840635169781688e-05,
"loss": 0.0001,
"step": 754
},
{
"epoch": 0.02701397212730558,
"grad_norm": 0.0003685827541630715,
"learning_rate": 2.8187370223681132e-05,
"loss": 0.0,
"step": 755
},
{
"epoch": 0.027049752222838435,
"grad_norm": 0.001124305883422494,
"learning_rate": 2.7969097511209308e-05,
"loss": 0.0001,
"step": 756
},
{
"epoch": 0.02708553231837129,
"grad_norm": 0.00040056207217276096,
"learning_rate": 2.775153571466502e-05,
"loss": 0.0,
"step": 757
},
{
"epoch": 0.027121312413904145,
"grad_norm": 0.0006837467662990093,
"learning_rate": 2.753468698129533e-05,
"loss": 0.0,
"step": 758
},
{
"epoch": 0.027157092509437,
"grad_norm": 0.0013138077920302749,
"learning_rate": 2.7318553451309726e-05,
"loss": 0.0001,
"step": 759
},
{
"epoch": 0.027192872604969855,
"grad_norm": 0.00030627817614004016,
"learning_rate": 2.7103137257858868e-05,
"loss": 0.0,
"step": 760
},
{
"epoch": 0.02722865270050271,
"grad_norm": 0.0007263069855980575,
"learning_rate": 2.688844052701359e-05,
"loss": 0.0,
"step": 761
},
{
"epoch": 0.027264432796035565,
"grad_norm": 0.0025389890652149916,
"learning_rate": 2.6674465377744017e-05,
"loss": 0.0001,
"step": 762
},
{
"epoch": 0.02730021289156842,
"grad_norm": 0.0011348793050274253,
"learning_rate": 2.646121392189841e-05,
"loss": 0.0,
"step": 763
},
{
"epoch": 0.027335992987101276,
"grad_norm": 0.0003449412470217794,
"learning_rate": 2.624868826418262e-05,
"loss": 0.0,
"step": 764
},
{
"epoch": 0.02737177308263413,
"grad_norm": 0.0005563509184867144,
"learning_rate": 2.603689050213902e-05,
"loss": 0.0,
"step": 765
},
{
"epoch": 0.027407553178166986,
"grad_norm": 0.0008122368017211556,
"learning_rate": 2.582582272612609e-05,
"loss": 0.0001,
"step": 766
},
{
"epoch": 0.02744333327369984,
"grad_norm": 0.000786035496275872,
"learning_rate": 2.561548701929749e-05,
"loss": 0.0001,
"step": 767
},
{
"epoch": 0.027479113369232696,
"grad_norm": 0.02800973877310753,
"learning_rate": 2.540588545758179e-05,
"loss": 0.0002,
"step": 768
},
{
"epoch": 0.02751489346476555,
"grad_norm": 0.0003413466038182378,
"learning_rate": 2.5197020109661772e-05,
"loss": 0.0,
"step": 769
},
{
"epoch": 0.027550673560298406,
"grad_norm": 0.030475104227662086,
"learning_rate": 2.4988893036954043e-05,
"loss": 0.0001,
"step": 770
},
{
"epoch": 0.02758645365583126,
"grad_norm": 0.000724347191862762,
"learning_rate": 2.4781506293588873e-05,
"loss": 0.0001,
"step": 771
},
{
"epoch": 0.027622233751364116,
"grad_norm": 0.00045491516357287765,
"learning_rate": 2.4574861926389615e-05,
"loss": 0.0,
"step": 772
},
{
"epoch": 0.02765801384689697,
"grad_norm": 0.0005608052015304565,
"learning_rate": 2.436896197485282e-05,
"loss": 0.0001,
"step": 773
},
{
"epoch": 0.027693793942429826,
"grad_norm": 0.0009342865669168532,
"learning_rate": 2.4163808471127812e-05,
"loss": 0.0,
"step": 774
},
{
"epoch": 0.02772957403796268,
"grad_norm": 0.0005581339355558157,
"learning_rate": 2.3959403439996907e-05,
"loss": 0.0,
"step": 775
},
{
"epoch": 0.027765354133495537,
"grad_norm": 0.002823425456881523,
"learning_rate": 2.37557488988552e-05,
"loss": 0.0001,
"step": 776
},
{
"epoch": 0.02780113422902839,
"grad_norm": 0.0017435902263969183,
"learning_rate": 2.3552846857690846e-05,
"loss": 0.0001,
"step": 777
},
{
"epoch": 0.027836914324561247,
"grad_norm": 1.2892370223999023,
"learning_rate": 2.3350699319065026e-05,
"loss": 0.0044,
"step": 778
},
{
"epoch": 0.027872694420094102,
"grad_norm": 0.00036629923852160573,
"learning_rate": 2.3149308278092342e-05,
"loss": 0.0,
"step": 779
},
{
"epoch": 0.027908474515626957,
"grad_norm": 0.0004008855321444571,
"learning_rate": 2.2948675722421086e-05,
"loss": 0.0,
"step": 780
},
{
"epoch": 0.027944254611159812,
"grad_norm": 0.0011866980930790305,
"learning_rate": 2.2748803632213557e-05,
"loss": 0.0,
"step": 781
},
{
"epoch": 0.027980034706692667,
"grad_norm": 0.0009693397441878915,
"learning_rate": 2.254969398012663e-05,
"loss": 0.0001,
"step": 782
},
{
"epoch": 0.028015814802225522,
"grad_norm": 0.0003140924090985209,
"learning_rate": 2.235134873129213e-05,
"loss": 0.0,
"step": 783
},
{
"epoch": 0.028051594897758377,
"grad_norm": 0.0012890478828921914,
"learning_rate": 2.2153769843297667e-05,
"loss": 0.0,
"step": 784
},
{
"epoch": 0.028087374993291232,
"grad_norm": 0.00023359716578852385,
"learning_rate": 2.195695926616702e-05,
"loss": 0.0,
"step": 785
},
{
"epoch": 0.028123155088824087,
"grad_norm": null,
"learning_rate": 2.195695926616702e-05,
"loss": 0.0227,
"step": 786
},
{
"epoch": 0.028158935184356942,
"grad_norm": 0.0002614323457237333,
"learning_rate": 2.1760918942341192e-05,
"loss": 0.0,
"step": 787
},
{
"epoch": 0.028194715279889798,
"grad_norm": 0.0004320174048189074,
"learning_rate": 2.1565650806658975e-05,
"loss": 0.0,
"step": 788
},
{
"epoch": 0.028230495375422653,
"grad_norm": 0.0003752956399694085,
"learning_rate": 2.137115678633811e-05,
"loss": 0.0,
"step": 789
},
{
"epoch": 0.028266275470955508,
"grad_norm": 0.000625955464784056,
"learning_rate": 2.1177438800956007e-05,
"loss": 0.0,
"step": 790
},
{
"epoch": 0.028302055566488363,
"grad_norm": 0.0027366415597498417,
"learning_rate": 2.098449876243096e-05,
"loss": 0.0001,
"step": 791
},
{
"epoch": 0.028337835662021218,
"grad_norm": 0.0010580029338598251,
"learning_rate": 2.07923385750033e-05,
"loss": 0.0,
"step": 792
},
{
"epoch": 0.028373615757554073,
"grad_norm": 0.0003477209247648716,
"learning_rate": 2.0600960135216462e-05,
"loss": 0.0,
"step": 793
},
{
"epoch": 0.028409395853086928,
"grad_norm": 0.0003285256680101156,
"learning_rate": 2.0410365331898416e-05,
"loss": 0.0,
"step": 794
},
{
"epoch": 0.028445175948619783,
"grad_norm": 0.0011474394705146551,
"learning_rate": 2.0220556046142893e-05,
"loss": 0.0001,
"step": 795
},
{
"epoch": 0.028480956044152638,
"grad_norm": 0.0004369614471215755,
"learning_rate": 2.0031534151290943e-05,
"loss": 0.0,
"step": 796
},
{
"epoch": 0.028516736139685493,
"grad_norm": 0.00162404531147331,
"learning_rate": 1.9843301512912327e-05,
"loss": 0.0001,
"step": 797
},
{
"epoch": 0.02855251623521835,
"grad_norm": 0.0009096541325561702,
"learning_rate": 1.965585998878724e-05,
"loss": 0.0001,
"step": 798
},
{
"epoch": 0.028588296330751203,
"grad_norm": 0.006506095640361309,
"learning_rate": 1.946921142888781e-05,
"loss": 0.0003,
"step": 799
},
{
"epoch": 0.02862407642628406,
"grad_norm": 0.0012401292333379388,
"learning_rate": 1.928335767535997e-05,
"loss": 0.0001,
"step": 800
},
{
"epoch": 0.028659856521816914,
"grad_norm": 0.0010858647292479873,
"learning_rate": 1.9098300562505266e-05,
"loss": 0.0,
"step": 801
},
{
"epoch": 0.02869563661734977,
"grad_norm": 0.0007080929935909808,
"learning_rate": 1.891404191676265e-05,
"loss": 0.0,
"step": 802
},
{
"epoch": 0.028731416712882624,
"grad_norm": 0.0020980716217309237,
"learning_rate": 1.8730583556690605e-05,
"loss": 0.0001,
"step": 803
},
{
"epoch": 0.02876719680841548,
"grad_norm": 0.003992127254605293,
"learning_rate": 1.854792729294905e-05,
"loss": 0.0002,
"step": 804
},
{
"epoch": 0.028802976903948334,
"grad_norm": 0.016032757237553596,
"learning_rate": 1.8366074928281607e-05,
"loss": 0.0004,
"step": 805
},
{
"epoch": 0.02883875699948119,
"grad_norm": 0.013076834380626678,
"learning_rate": 1.818502825749764e-05,
"loss": 0.0002,
"step": 806
},
{
"epoch": 0.028874537095014044,
"grad_norm": 0.0002816877677105367,
"learning_rate": 1.8004789067454764e-05,
"loss": 0.0,
"step": 807
},
{
"epoch": 0.0289103171905469,
"grad_norm": 0.002238611225038767,
"learning_rate": 1.7825359137040988e-05,
"loss": 0.0001,
"step": 808
},
{
"epoch": 0.028946097286079754,
"grad_norm": 0.000730925879906863,
"learning_rate": 1.7646740237157256e-05,
"loss": 0.0001,
"step": 809
},
{
"epoch": 0.02898187738161261,
"grad_norm": 0.0032203211449086666,
"learning_rate": 1.7468934130700044e-05,
"loss": 0.0001,
"step": 810
},
{
"epoch": 0.029017657477145464,
"grad_norm": 0.0004394878342282027,
"learning_rate": 1.7291942572543807e-05,
"loss": 0.0,
"step": 811
},
{
"epoch": 0.02905343757267832,
"grad_norm": 0.0012944753980264068,
"learning_rate": 1.7115767309523812e-05,
"loss": 0.0,
"step": 812
},
{
"epoch": 0.029089217668211174,
"grad_norm": 0.0003389958874322474,
"learning_rate": 1.6940410080418723e-05,
"loss": 0.0,
"step": 813
},
{
"epoch": 0.02912499776374403,
"grad_norm": 0.00032627416658215225,
"learning_rate": 1.6765872615933677e-05,
"loss": 0.0,
"step": 814
},
{
"epoch": 0.029160777859276885,
"grad_norm": 0.0004266609321348369,
"learning_rate": 1.6592156638682886e-05,
"loss": 0.0,
"step": 815
},
{
"epoch": 0.02919655795480974,
"grad_norm": 0.0006152820424176753,
"learning_rate": 1.6419263863172997e-05,
"loss": 0.0001,
"step": 816
},
{
"epoch": 0.029232338050342595,
"grad_norm": 0.0014175320975482464,
"learning_rate": 1.6247195995785837e-05,
"loss": 0.0001,
"step": 817
},
{
"epoch": 0.02926811814587545,
"grad_norm": 0.0005357126356102526,
"learning_rate": 1.6075954734761845e-05,
"loss": 0.0,
"step": 818
},
{
"epoch": 0.029303898241408305,
"grad_norm": 0.0012012666556984186,
"learning_rate": 1.5905541770183096e-05,
"loss": 0.0001,
"step": 819
},
{
"epoch": 0.02933967833694116,
"grad_norm": 0.005087379831820726,
"learning_rate": 1.5735958783956794e-05,
"loss": 0.0001,
"step": 820
},
{
"epoch": 0.029375458432474015,
"grad_norm": 0.0005693581770174205,
"learning_rate": 1.5567207449798515e-05,
"loss": 0.0,
"step": 821
},
{
"epoch": 0.02941123852800687,
"grad_norm": 0.005406623240560293,
"learning_rate": 1.539928943321579e-05,
"loss": 0.0001,
"step": 822
},
{
"epoch": 0.029447018623539725,
"grad_norm": 0.0009374113869853318,
"learning_rate": 1.5232206391491699e-05,
"loss": 0.0,
"step": 823
},
{
"epoch": 0.02948279871907258,
"grad_norm": 0.0003775600343942642,
"learning_rate": 1.5065959973668353e-05,
"loss": 0.0,
"step": 824
},
{
"epoch": 0.029518578814605435,
"grad_norm": 0.0003483894106466323,
"learning_rate": 1.4900551820530828e-05,
"loss": 0.0,
"step": 825
},
{
"epoch": 0.02955435891013829,
"grad_norm": 0.0003740845713764429,
"learning_rate": 1.4735983564590783e-05,
"loss": 0.0,
"step": 826
},
{
"epoch": 0.029590139005671146,
"grad_norm": 0.0002811375306919217,
"learning_rate": 1.4572256830070497e-05,
"loss": 0.0,
"step": 827
},
{
"epoch": 0.029625919101204,
"grad_norm": 0.0006832360522821546,
"learning_rate": 1.4409373232886702e-05,
"loss": 0.0001,
"step": 828
},
{
"epoch": 0.029661699196736856,
"grad_norm": 0.000255306891631335,
"learning_rate": 1.4247334380634792e-05,
"loss": 0.0,
"step": 829
},
{
"epoch": 0.02969747929226971,
"grad_norm": 0.00022817605349700898,
"learning_rate": 1.4086141872572789e-05,
"loss": 0.0,
"step": 830
},
{
"epoch": 0.029733259387802566,
"grad_norm": 0.00029117820668034256,
"learning_rate": 1.3925797299605647e-05,
"loss": 0.0,
"step": 831
},
{
"epoch": 0.02976903948333542,
"grad_norm": 0.000468272075522691,
"learning_rate": 1.3766302244269624e-05,
"loss": 0.0,
"step": 832
},
{
"epoch": 0.029804819578868276,
"grad_norm": 0.007703302428126335,
"learning_rate": 1.3607658280716473e-05,
"loss": 0.0003,
"step": 833
},
{
"epoch": 0.02984059967440113,
"grad_norm": 0.0002647882793098688,
"learning_rate": 1.3449866974698122e-05,
"loss": 0.0,
"step": 834
},
{
"epoch": 0.029876379769933986,
"grad_norm": 0.000288227602140978,
"learning_rate": 1.3292929883550998e-05,
"loss": 0.0,
"step": 835
},
{
"epoch": 0.02991215986546684,
"grad_norm": 0.0011180542642250657,
"learning_rate": 1.3136848556180892e-05,
"loss": 0.0,
"step": 836
},
{
"epoch": 0.029947939960999696,
"grad_norm": 0.0015647278632968664,
"learning_rate": 1.2981624533047432e-05,
"loss": 0.0001,
"step": 837
},
{
"epoch": 0.02998372005653255,
"grad_norm": 0.22069744765758514,
"learning_rate": 1.2827259346149122e-05,
"loss": 0.0027,
"step": 838
},
{
"epoch": 0.030019500152065406,
"grad_norm": 0.001163410721346736,
"learning_rate": 1.2673754519008008e-05,
"loss": 0.0001,
"step": 839
},
{
"epoch": 0.03005528024759826,
"grad_norm": 0.00020182439766358584,
"learning_rate": 1.2521111566654731e-05,
"loss": 0.0,
"step": 840
},
{
"epoch": 0.030091060343131117,
"grad_norm": 0.0008434744086116552,
"learning_rate": 1.2369331995613665e-05,
"loss": 0.0001,
"step": 841
},
{
"epoch": 0.03012684043866397,
"grad_norm": 0.0004498157650232315,
"learning_rate": 1.2218417303887842e-05,
"loss": 0.0,
"step": 842
},
{
"epoch": 0.030162620534196827,
"grad_norm": 0.00038383417995646596,
"learning_rate": 1.206836898094439e-05,
"loss": 0.0,
"step": 843
},
{
"epoch": 0.030198400629729682,
"grad_norm": 0.0030869089532643557,
"learning_rate": 1.191918850769964e-05,
"loss": 0.0001,
"step": 844
},
{
"epoch": 0.030234180725262537,
"grad_norm": 0.0009010889334604144,
"learning_rate": 1.1770877356504683e-05,
"loss": 0.0001,
"step": 845
},
{
"epoch": 0.030269960820795392,
"grad_norm": 0.0013517620973289013,
"learning_rate": 1.1623436991130654e-05,
"loss": 0.0,
"step": 846
},
{
"epoch": 0.030305740916328247,
"grad_norm": 0.0005510963965207338,
"learning_rate": 1.1476868866754486e-05,
"loss": 0.0,
"step": 847
},
{
"epoch": 0.030341521011861102,
"grad_norm": 0.0006830064230598509,
"learning_rate": 1.1331174429944347e-05,
"loss": 0.0,
"step": 848
},
{
"epoch": 0.030377301107393957,
"grad_norm": 0.0007867237436585128,
"learning_rate": 1.1186355118645554e-05,
"loss": 0.0001,
"step": 849
},
{
"epoch": 0.030413081202926812,
"grad_norm": 0.0006200165371410549,
"learning_rate": 1.1042412362166222e-05,
"loss": 0.0,
"step": 850
},
{
"epoch": 0.030448861298459667,
"grad_norm": 0.001512668328359723,
"learning_rate": 1.0899347581163221e-05,
"loss": 0.0001,
"step": 851
},
{
"epoch": 0.030484641393992522,
"grad_norm": 0.0007012730347923934,
"learning_rate": 1.0757162187628222e-05,
"loss": 0.0,
"step": 852
},
{
"epoch": 0.030520421489525378,
"grad_norm": 0.0004401567275635898,
"learning_rate": 1.0615857584873623e-05,
"loss": 0.0001,
"step": 853
},
{
"epoch": 0.030556201585058233,
"grad_norm": 0.00031436511198990047,
"learning_rate": 1.0475435167518843e-05,
"loss": 0.0,
"step": 854
},
{
"epoch": 0.030591981680591088,
"grad_norm": 0.001058329944498837,
"learning_rate": 1.0335896321476413e-05,
"loss": 0.0001,
"step": 855
},
{
"epoch": 0.030627761776123943,
"grad_norm": 0.0007222515996545553,
"learning_rate": 1.0197242423938446e-05,
"loss": 0.0,
"step": 856
},
{
"epoch": 0.030663541871656798,
"grad_norm": 0.0014382116496562958,
"learning_rate": 1.0059474843362892e-05,
"loss": 0.0001,
"step": 857
},
{
"epoch": 0.030699321967189653,
"grad_norm": 0.0014747374225407839,
"learning_rate": 9.922594939460194e-06,
"loss": 0.0001,
"step": 858
},
{
"epoch": 0.030735102062722508,
"grad_norm": 0.00035679215216077864,
"learning_rate": 9.786604063179728e-06,
"loss": 0.0,
"step": 859
},
{
"epoch": 0.030770882158255363,
"grad_norm": 0.00018847925821319222,
"learning_rate": 9.651503556696516e-06,
"loss": 0.0,
"step": 860
},
{
"epoch": 0.030806662253788218,
"grad_norm": 0.00047721504233777523,
"learning_rate": 9.517294753398064e-06,
"loss": 0.0,
"step": 861
},
{
"epoch": 0.030842442349321073,
"grad_norm": 0.00044450335553847253,
"learning_rate": 9.383978977871021e-06,
"loss": 0.0,
"step": 862
},
{
"epoch": 0.03087822244485393,
"grad_norm": 0.000441976822912693,
"learning_rate": 9.251557545888312e-06,
"loss": 0.0,
"step": 863
},
{
"epoch": 0.030914002540386783,
"grad_norm": 0.0003807090106420219,
"learning_rate": 9.120031764395987e-06,
"loss": 0.0,
"step": 864
},
{
"epoch": 0.03094978263591964,
"grad_norm": 0.00022103596711531281,
"learning_rate": 8.989402931500434e-06,
"loss": 0.0,
"step": 865
},
{
"epoch": 0.030985562731452494,
"grad_norm": 0.0011002712417393923,
"learning_rate": 8.85967233645547e-06,
"loss": 0.0001,
"step": 866
},
{
"epoch": 0.03102134282698535,
"grad_norm": 0.000323577260132879,
"learning_rate": 8.730841259649725e-06,
"loss": 0.0,
"step": 867
},
{
"epoch": 0.031057122922518204,
"grad_norm": 0.0002585575566627085,
"learning_rate": 8.602910972593892e-06,
"loss": 0.0,
"step": 868
},
{
"epoch": 0.03109290301805106,
"grad_norm": 0.0009426943142898381,
"learning_rate": 8.475882737908248e-06,
"loss": 0.0,
"step": 869
},
{
"epoch": 0.031128683113583914,
"grad_norm": 0.0017182575538754463,
"learning_rate": 8.34975780931021e-06,
"loss": 0.0001,
"step": 870
},
{
"epoch": 0.03116446320911677,
"grad_norm": 0.0020622152369469404,
"learning_rate": 8.224537431601886e-06,
"loss": 0.0001,
"step": 871
},
{
"epoch": 0.031200243304649624,
"grad_norm": 0.0012766410363838077,
"learning_rate": 8.100222840657878e-06,
"loss": 0.0001,
"step": 872
},
{
"epoch": 0.03123602340018248,
"grad_norm": 0.0012319182278588414,
"learning_rate": 7.976815263412963e-06,
"loss": 0.0,
"step": 873
},
{
"epoch": 0.03127180349571533,
"grad_norm": 0.05080391466617584,
"learning_rate": 7.854315917850163e-06,
"loss": 0.0002,
"step": 874
},
{
"epoch": 0.03130758359124819,
"grad_norm": 0.0002540445129852742,
"learning_rate": 7.73272601298851e-06,
"loss": 0.0,
"step": 875
},
{
"epoch": 0.03134336368678104,
"grad_norm": 0.0005007732543163002,
"learning_rate": 7.612046748871327e-06,
"loss": 0.0,
"step": 876
},
{
"epoch": 0.0313791437823139,
"grad_norm": 0.00018185537192039192,
"learning_rate": 7.492279316554207e-06,
"loss": 0.0,
"step": 877
},
{
"epoch": 0.03141492387784675,
"grad_norm": 0.0014064558781683445,
"learning_rate": 7.3734248980933395e-06,
"loss": 0.0,
"step": 878
},
{
"epoch": 0.03145070397337961,
"grad_norm": 0.002208331134170294,
"learning_rate": 7.255484666533874e-06,
"loss": 0.0001,
"step": 879
},
{
"epoch": 0.03148648406891246,
"grad_norm": 0.0004916066536679864,
"learning_rate": 7.138459785898266e-06,
"loss": 0.0,
"step": 880
},
{
"epoch": 0.03152226416444532,
"grad_norm": 0.0004903101944364607,
"learning_rate": 7.022351411174866e-06,
"loss": 0.0,
"step": 881
},
{
"epoch": 0.03155804425997817,
"grad_norm": 0.0016464211512356997,
"learning_rate": 6.907160688306425e-06,
"loss": 0.0001,
"step": 882
},
{
"epoch": 0.03159382435551103,
"grad_norm": 0.00020444215624593198,
"learning_rate": 6.7928887541789055e-06,
"loss": 0.0,
"step": 883
},
{
"epoch": 0.03162960445104388,
"grad_norm": 0.039082761853933334,
"learning_rate": 6.679536736610137e-06,
"loss": 0.0003,
"step": 884
},
{
"epoch": 0.03166538454657674,
"grad_norm": 0.0004566194547805935,
"learning_rate": 6.5671057543387985e-06,
"loss": 0.0,
"step": 885
},
{
"epoch": 0.03170116464210959,
"grad_norm": 0.00025817123241722584,
"learning_rate": 6.455596917013273e-06,
"loss": 0.0,
"step": 886
},
{
"epoch": 0.03173694473764245,
"grad_norm": 0.0003827243053819984,
"learning_rate": 6.345011325180772e-06,
"loss": 0.0,
"step": 887
},
{
"epoch": 0.0317727248331753,
"grad_norm": 0.0004853069840464741,
"learning_rate": 6.235350070276447e-06,
"loss": 0.0,
"step": 888
},
{
"epoch": 0.03180850492870816,
"grad_norm": 0.00047525035915896297,
"learning_rate": 6.126614234612593e-06,
"loss": 0.0,
"step": 889
},
{
"epoch": 0.03184428502424101,
"grad_norm": 0.0005343626835383475,
"learning_rate": 6.018804891368035e-06,
"loss": 0.0,
"step": 890
},
{
"epoch": 0.03188006511977387,
"grad_norm": 0.0003221333317924291,
"learning_rate": 5.911923104577455e-06,
"loss": 0.0,
"step": 891
},
{
"epoch": 0.03191584521530672,
"grad_norm": 0.0007317602867260575,
"learning_rate": 5.805969929120947e-06,
"loss": 0.0,
"step": 892
},
{
"epoch": 0.03195162531083958,
"grad_norm": 0.0011024659033864737,
"learning_rate": 5.700946410713548e-06,
"loss": 0.0001,
"step": 893
},
{
"epoch": 0.03198740540637243,
"grad_norm": 0.0030077833216637373,
"learning_rate": 5.5968535858950345e-06,
"loss": 0.0001,
"step": 894
},
{
"epoch": 0.03202318550190529,
"grad_norm": 0.0056787352077662945,
"learning_rate": 5.49369248201953e-06,
"loss": 0.0001,
"step": 895
},
{
"epoch": 0.03205896559743814,
"grad_norm": 0.0005671354592777789,
"learning_rate": 5.39146411724547e-06,
"loss": 0.0,
"step": 896
},
{
"epoch": 0.032094745692971,
"grad_norm": 0.0006770672625862062,
"learning_rate": 5.290169500525577e-06,
"loss": 0.0,
"step": 897
},
{
"epoch": 0.03213052578850385,
"grad_norm": 0.00038105715066194534,
"learning_rate": 5.189809631596798e-06,
"loss": 0.0,
"step": 898
},
{
"epoch": 0.03216630588403671,
"grad_norm": 0.0031658518128097057,
"learning_rate": 5.0903855009705514e-06,
"loss": 0.0001,
"step": 899
},
{
"epoch": 0.03220208597956956,
"grad_norm": 0.0007510024588555098,
"learning_rate": 4.991898089922819e-06,
"loss": 0.0,
"step": 900
},
{
"epoch": 0.03223786607510242,
"grad_norm": 0.00039749740972183645,
"learning_rate": 4.8943483704846475e-06,
"loss": 0.0,
"step": 901
},
{
"epoch": 0.03227364617063527,
"grad_norm": 0.0016887275269255042,
"learning_rate": 4.797737305432337e-06,
"loss": 0.0001,
"step": 902
},
{
"epoch": 0.03230942626616813,
"grad_norm": 0.0017919878009706736,
"learning_rate": 4.702065848278126e-06,
"loss": 0.0001,
"step": 903
},
{
"epoch": 0.03234520636170098,
"grad_norm": 0.0026703733019530773,
"learning_rate": 4.607334943260655e-06,
"loss": 0.0001,
"step": 904
},
{
"epoch": 0.03238098645723384,
"grad_norm": 0.015657538548111916,
"learning_rate": 4.513545525335705e-06,
"loss": 0.0002,
"step": 905
},
{
"epoch": 0.03241676655276669,
"grad_norm": 0.0002171381638618186,
"learning_rate": 4.420698520166988e-06,
"loss": 0.0,
"step": 906
},
{
"epoch": 0.03245254664829955,
"grad_norm": 0.00031557457987219095,
"learning_rate": 4.328794844116946e-06,
"loss": 0.0,
"step": 907
},
{
"epoch": 0.0324883267438324,
"grad_norm": 0.0007938373018987477,
"learning_rate": 4.237835404237778e-06,
"loss": 0.0,
"step": 908
},
{
"epoch": 0.03252410683936526,
"grad_norm": 0.0008096988312900066,
"learning_rate": 4.147821098262405e-06,
"loss": 0.0,
"step": 909
},
{
"epoch": 0.032559886934898113,
"grad_norm": 0.0005311848362907767,
"learning_rate": 4.0587528145957235e-06,
"loss": 0.0,
"step": 910
},
{
"epoch": 0.03259566703043097,
"grad_norm": 0.0013915644958615303,
"learning_rate": 3.970631432305694e-06,
"loss": 0.0,
"step": 911
},
{
"epoch": 0.032631447125963824,
"grad_norm": 0.0006781916017644107,
"learning_rate": 3.883457821114811e-06,
"loss": 0.0001,
"step": 912
},
{
"epoch": 0.03266722722149668,
"grad_norm": 0.0002211649261880666,
"learning_rate": 3.797232841391407e-06,
"loss": 0.0,
"step": 913
},
{
"epoch": 0.032703007317029534,
"grad_norm": 0.0013252223143354058,
"learning_rate": 3.711957344141237e-06,
"loss": 0.0001,
"step": 914
},
{
"epoch": 0.03273878741256239,
"grad_norm": 0.0002602491294965148,
"learning_rate": 3.627632170999029e-06,
"loss": 0.0,
"step": 915
},
{
"epoch": 0.032774567508095244,
"grad_norm": 0.00041321589378640056,
"learning_rate": 3.5442581542201923e-06,
"loss": 0.0,
"step": 916
},
{
"epoch": 0.0328103476036281,
"grad_norm": 0.0012678432976827025,
"learning_rate": 3.461836116672612e-06,
"loss": 0.0001,
"step": 917
},
{
"epoch": 0.032846127699160954,
"grad_norm": 0.0001828453823691234,
"learning_rate": 3.380366871828522e-06,
"loss": 0.0,
"step": 918
},
{
"epoch": 0.03288190779469381,
"grad_norm": 0.001912861131131649,
"learning_rate": 3.2998512237565005e-06,
"loss": 0.0001,
"step": 919
},
{
"epoch": 0.032917687890226664,
"grad_norm": 0.00035834297887049615,
"learning_rate": 3.2202899671134546e-06,
"loss": 0.0,
"step": 920
},
{
"epoch": 0.03295346798575952,
"grad_norm": 0.0003436813422013074,
"learning_rate": 3.1416838871368924e-06,
"loss": 0.0,
"step": 921
},
{
"epoch": 0.032989248081292374,
"grad_norm": 0.0003717408108059317,
"learning_rate": 3.064033759637064e-06,
"loss": 0.0,
"step": 922
},
{
"epoch": 0.03302502817682523,
"grad_norm": 0.0005891156033612788,
"learning_rate": 2.9873403509894203e-06,
"loss": 0.0,
"step": 923
},
{
"epoch": 0.033060808272358085,
"grad_norm": 0.00025783927412703633,
"learning_rate": 2.9116044181269007e-06,
"loss": 0.0,
"step": 924
},
{
"epoch": 0.03309658836789094,
"grad_norm": 0.00029453469323925674,
"learning_rate": 2.836826708532603e-06,
"loss": 0.0,
"step": 925
},
{
"epoch": 0.033132368463423795,
"grad_norm": 0.0010198934469372034,
"learning_rate": 2.7630079602323442e-06,
"loss": 0.0001,
"step": 926
},
{
"epoch": 0.03316814855895665,
"grad_norm": 0.004502155818045139,
"learning_rate": 2.690148901787337e-06,
"loss": 0.0001,
"step": 927
},
{
"epoch": 0.033203928654489505,
"grad_norm": 0.00021984580962453038,
"learning_rate": 2.618250252287113e-06,
"loss": 0.0,
"step": 928
},
{
"epoch": 0.03323970875002236,
"grad_norm": 0.0007490668795071542,
"learning_rate": 2.5473127213422763e-06,
"loss": 0.0,
"step": 929
},
{
"epoch": 0.033275488845555215,
"grad_norm": 0.0006981800543144345,
"learning_rate": 2.4773370090776626e-06,
"loss": 0.0,
"step": 930
},
{
"epoch": 0.033311268941088074,
"grad_norm": 0.0020569113548845053,
"learning_rate": 2.4083238061252567e-06,
"loss": 0.0001,
"step": 931
},
{
"epoch": 0.033347049036620925,
"grad_norm": 0.0018162206979468465,
"learning_rate": 2.3402737936175425e-06,
"loss": 0.0001,
"step": 932
},
{
"epoch": 0.033382829132153784,
"grad_norm": 0.00036989006912335753,
"learning_rate": 2.273187643180652e-06,
"loss": 0.0001,
"step": 933
},
{
"epoch": 0.033418609227686635,
"grad_norm": 0.0003000934957526624,
"learning_rate": 2.2070660169278166e-06,
"loss": 0.0,
"step": 934
},
{
"epoch": 0.033454389323219494,
"grad_norm": 0.001891492516733706,
"learning_rate": 2.141909567452793e-06,
"loss": 0.0001,
"step": 935
},
{
"epoch": 0.033490169418752345,
"grad_norm": 0.0007120324298739433,
"learning_rate": 2.0777189378234143e-06,
"loss": 0.0001,
"step": 936
},
{
"epoch": 0.033525949514285204,
"grad_norm": 0.0004102467792108655,
"learning_rate": 2.014494761575314e-06,
"loss": 0.0,
"step": 937
},
{
"epoch": 0.033561729609818056,
"grad_norm": 0.00032427874975837767,
"learning_rate": 1.9522376627055583e-06,
"loss": 0.0,
"step": 938
},
{
"epoch": 0.033597509705350914,
"grad_norm": 0.0007267544860951602,
"learning_rate": 1.8909482556666024e-06,
"loss": 0.0,
"step": 939
},
{
"epoch": 0.033633289800883766,
"grad_norm": 0.000546029070392251,
"learning_rate": 1.8306271453601199e-06,
"loss": 0.0001,
"step": 940
},
{
"epoch": 0.033669069896416624,
"grad_norm": 0.013414498418569565,
"learning_rate": 1.771274927131139e-06,
"loss": 0.0002,
"step": 941
},
{
"epoch": 0.033704849991949476,
"grad_norm": 0.0038646236062049866,
"learning_rate": 1.712892186762083e-06,
"loss": 0.0001,
"step": 942
},
{
"epoch": 0.033740630087482334,
"grad_norm": 0.0012384445872157812,
"learning_rate": 1.6554795004670388e-06,
"loss": 0.0001,
"step": 943
},
{
"epoch": 0.033776410183015186,
"grad_norm": 0.0006567625678144395,
"learning_rate": 1.5990374348860305e-06,
"loss": 0.0001,
"step": 944
},
{
"epoch": 0.033812190278548045,
"grad_norm": 0.0008650491945445538,
"learning_rate": 1.543566547079467e-06,
"loss": 0.0,
"step": 945
},
{
"epoch": 0.033847970374080896,
"grad_norm": 0.0006459559663198888,
"learning_rate": 1.4890673845226133e-06,
"loss": 0.0001,
"step": 946
},
{
"epoch": 0.033883750469613755,
"grad_norm": 0.0005734586156904697,
"learning_rate": 1.4355404851001952e-06,
"loss": 0.0,
"step": 947
},
{
"epoch": 0.033919530565146606,
"grad_norm": 0.0004001356428489089,
"learning_rate": 1.3829863771011253e-06,
"loss": 0.0,
"step": 948
},
{
"epoch": 0.033955310660679465,
"grad_norm": 0.0002406181301921606,
"learning_rate": 1.3314055792131964e-06,
"loss": 0.0,
"step": 949
},
{
"epoch": 0.03399109075621232,
"grad_norm": 0.00018440363055560738,
"learning_rate": 1.280798600518085e-06,
"loss": 0.0,
"step": 950
},
{
"epoch": 0.034026870851745175,
"grad_norm": 0.00029384635854512453,
"learning_rate": 1.231165940486234e-06,
"loss": 0.0,
"step": 951
},
{
"epoch": 0.03406265094727803,
"grad_norm": 0.0008108361507765949,
"learning_rate": 1.1825080889719563e-06,
"loss": 0.0,
"step": 952
},
{
"epoch": 0.034098431042810885,
"grad_norm": 0.0003570028638932854,
"learning_rate": 1.134825526208605e-06,
"loss": 0.0,
"step": 953
},
{
"epoch": 0.03413421113834374,
"grad_norm": 0.0013423620257526636,
"learning_rate": 1.0881187228038215e-06,
"loss": 0.0001,
"step": 954
},
{
"epoch": 0.034169991233876595,
"grad_norm": 0.005175633821636438,
"learning_rate": 1.0423881397349068e-06,
"loss": 0.0002,
"step": 955
},
{
"epoch": 0.03420577132940945,
"grad_norm": 0.000696335278917104,
"learning_rate": 9.976342283442463e-07,
"loss": 0.0,
"step": 956
},
{
"epoch": 0.034241551424942306,
"grad_norm": 0.00048268615501001477,
"learning_rate": 9.538574303348813e-07,
"loss": 0.0,
"step": 957
},
{
"epoch": 0.03427733152047516,
"grad_norm": 0.0002519102126825601,
"learning_rate": 9.110581777661331e-07,
"loss": 0.0,
"step": 958
},
{
"epoch": 0.034313111616008016,
"grad_norm": 0.0014691960532218218,
"learning_rate": 8.692368930493521e-07,
"loss": 0.0001,
"step": 959
},
{
"epoch": 0.03434889171154087,
"grad_norm": 0.0012190488632768393,
"learning_rate": 8.283939889437209e-07,
"loss": 0.0,
"step": 960
},
{
"epoch": 0.034384671807073726,
"grad_norm": 0.00024619302712380886,
"learning_rate": 7.885298685522235e-07,
"loss": 0.0,
"step": 961
},
{
"epoch": 0.03442045190260658,
"grad_norm": 0.0028114488814026117,
"learning_rate": 7.496449253176274e-07,
"loss": 0.0001,
"step": 962
},
{
"epoch": 0.034456231998139436,
"grad_norm": 0.0008010416058823466,
"learning_rate": 7.117395430186414e-07,
"loss": 0.0,
"step": 963
},
{
"epoch": 0.03449201209367229,
"grad_norm": 0.007256933022290468,
"learning_rate": 6.748140957660631e-07,
"loss": 0.0002,
"step": 964
},
{
"epoch": 0.034527792189205146,
"grad_norm": 0.0004452927387319505,
"learning_rate": 6.388689479991605e-07,
"loss": 0.0,
"step": 965
},
{
"epoch": 0.034563572284738,
"grad_norm": 0.0015947840875014663,
"learning_rate": 6.039044544820404e-07,
"loss": 0.0001,
"step": 966
},
{
"epoch": 0.034599352380270856,
"grad_norm": 0.006381189450621605,
"learning_rate": 5.699209603001076e-07,
"loss": 0.0001,
"step": 967
},
{
"epoch": 0.03463513247580371,
"grad_norm": 0.00047914046444930136,
"learning_rate": 5.369188008567672e-07,
"loss": 0.0,
"step": 968
},
{
"epoch": 0.034670912571336567,
"grad_norm": 0.01858740672469139,
"learning_rate": 5.048983018699827e-07,
"loss": 0.0001,
"step": 969
},
{
"epoch": 0.03470669266686942,
"grad_norm": 0.001538052223622799,
"learning_rate": 4.738597793691679e-07,
"loss": 0.0001,
"step": 970
},
{
"epoch": 0.03474247276240228,
"grad_norm": 0.027744438499212265,
"learning_rate": 4.438035396920004e-07,
"loss": 0.0002,
"step": 971
},
{
"epoch": 0.03477825285793513,
"grad_norm": 0.00037432098179124296,
"learning_rate": 4.1472987948143473e-07,
"loss": 0.0,
"step": 972
},
{
"epoch": 0.03481403295346799,
"grad_norm": 0.0005791685543954372,
"learning_rate": 3.866390856827495e-07,
"loss": 0.0,
"step": 973
},
{
"epoch": 0.03484981304900084,
"grad_norm": 0.0002605381014291197,
"learning_rate": 3.595314355407609e-07,
"loss": 0.0,
"step": 974
},
{
"epoch": 0.0348855931445337,
"grad_norm": 0.0015622248174622655,
"learning_rate": 3.3340719659701313e-07,
"loss": 0.0001,
"step": 975
},
{
"epoch": 0.03492137324006655,
"grad_norm": 0.00022047720267437398,
"learning_rate": 3.0826662668720364e-07,
"loss": 0.0,
"step": 976
},
{
"epoch": 0.03495715333559941,
"grad_norm": 0.0006391391507349908,
"learning_rate": 2.841099739386066e-07,
"loss": 0.0,
"step": 977
},
{
"epoch": 0.03499293343113226,
"grad_norm": 0.2375146448612213,
"learning_rate": 2.609374767676309e-07,
"loss": 0.0042,
"step": 978
},
{
"epoch": 0.03502871352666512,
"grad_norm": 0.0008213887922465801,
"learning_rate": 2.387493638774774e-07,
"loss": 0.0,
"step": 979
},
{
"epoch": 0.03506449362219797,
"grad_norm": 0.0002956699172500521,
"learning_rate": 2.175458542558517e-07,
"loss": 0.0,
"step": 980
},
{
"epoch": 0.03510027371773083,
"grad_norm": 0.0015261947410181165,
"learning_rate": 1.973271571728441e-07,
"loss": 0.0001,
"step": 981
},
{
"epoch": 0.03513605381326368,
"grad_norm": 0.0001553288020659238,
"learning_rate": 1.7809347217881966e-07,
"loss": 0.0,
"step": 982
},
{
"epoch": 0.03517183390879654,
"grad_norm": 0.0015854670200496912,
"learning_rate": 1.598449891024978e-07,
"loss": 0.0001,
"step": 983
},
{
"epoch": 0.03520761400432939,
"grad_norm": 0.010904781520366669,
"learning_rate": 1.425818880490315e-07,
"loss": 0.0001,
"step": 984
},
{
"epoch": 0.03524339409986225,
"grad_norm": 0.00020986709569115192,
"learning_rate": 1.2630433939825327e-07,
"loss": 0.0,
"step": 985
},
{
"epoch": 0.0352791741953951,
"grad_norm": 0.0002844655537046492,
"learning_rate": 1.1101250380300965e-07,
"loss": 0.0,
"step": 986
},
{
"epoch": 0.03531495429092796,
"grad_norm": 0.00026205452741123736,
"learning_rate": 9.670653218752934e-08,
"loss": 0.0,
"step": 987
},
{
"epoch": 0.03535073438646081,
"grad_norm": 0.0008910206379368901,
"learning_rate": 8.33865657459909e-08,
"loss": 0.0001,
"step": 988
},
{
"epoch": 0.03538651448199367,
"grad_norm": 0.0008078790851868689,
"learning_rate": 7.105273594107953e-08,
"loss": 0.0,
"step": 989
},
{
"epoch": 0.03542229457752652,
"grad_norm": 0.00078550138277933,
"learning_rate": 5.970516450271025e-08,
"loss": 0.0001,
"step": 990
},
{
"epoch": 0.03545807467305938,
"grad_norm": 0.004978294484317303,
"learning_rate": 4.934396342684e-08,
"loss": 0.0001,
"step": 991
},
{
"epoch": 0.03549385476859223,
"grad_norm": 0.0011256830766797066,
"learning_rate": 3.996923497434635e-08,
"loss": 0.0001,
"step": 992
},
{
"epoch": 0.03552963486412509,
"grad_norm": 0.01774189993739128,
"learning_rate": 3.1581071670006015e-08,
"loss": 0.0001,
"step": 993
},
{
"epoch": 0.03556541495965794,
"grad_norm": 0.0002433753979858011,
"learning_rate": 2.417955630159563e-08,
"loss": 0.0,
"step": 994
},
{
"epoch": 0.0356011950551908,
"grad_norm": 0.0002669621608220041,
"learning_rate": 1.7764761919103477e-08,
"loss": 0.0,
"step": 995
},
{
"epoch": 0.03563697515072365,
"grad_norm": 0.0005420253146439791,
"learning_rate": 1.2336751833941229e-08,
"loss": 0.0,
"step": 996
},
{
"epoch": 0.03567275524625651,
"grad_norm": 0.00019939179765060544,
"learning_rate": 7.895579618388827e-09,
"loss": 0.0,
"step": 997
},
{
"epoch": 0.03570853534178936,
"grad_norm": 0.0011194644030183554,
"learning_rate": 4.4412891050171765e-09,
"loss": 0.0001,
"step": 998
},
{
"epoch": 0.03574431543732222,
"grad_norm": 0.0004404011706355959,
"learning_rate": 1.973914386288467e-09,
"loss": 0.0,
"step": 999
},
{
"epoch": 0.03578009553285507,
"grad_norm": 0.000688655418343842,
"learning_rate": 4.934798141786879e-10,
"loss": 0.0001,
"step": 1000
},
{
"epoch": 0.03578009553285507,
"eval_loss": 6.354823563015088e-05,
"eval_runtime": 859.7993,
"eval_samples_per_second": 6.843,
"eval_steps_per_second": 6.843,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.6376911681604813e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}