Training in progress, step 1346, checkpoint (commit ed0ec3d, verified)
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.999628666914222,
"eval_steps": 337,
"global_step": 1346,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0007426661715558856,
"grad_norm": 1.4470734596252441,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.3568,
"step": 1
},
{
"epoch": 0.0007426661715558856,
"eval_loss": 0.4649902582168579,
"eval_runtime": 189.2701,
"eval_samples_per_second": 5.991,
"eval_steps_per_second": 2.996,
"step": 1
},
{
"epoch": 0.0014853323431117712,
"grad_norm": 1.335021734237671,
"learning_rate": 4.000000000000001e-06,
"loss": 1.7148,
"step": 2
},
{
"epoch": 0.0022279985146676567,
"grad_norm": 2.0106494426727295,
"learning_rate": 6e-06,
"loss": 1.9387,
"step": 3
},
{
"epoch": 0.0029706646862235424,
"grad_norm": 2.0463790893554688,
"learning_rate": 8.000000000000001e-06,
"loss": 2.1194,
"step": 4
},
{
"epoch": 0.003713330857779428,
"grad_norm": 2.6946020126342773,
"learning_rate": 1e-05,
"loss": 2.2371,
"step": 5
},
{
"epoch": 0.004455997029335313,
"grad_norm": 2.0213823318481445,
"learning_rate": 1.2e-05,
"loss": 2.4407,
"step": 6
},
{
"epoch": 0.0051986632008912,
"grad_norm": 2.055884599685669,
"learning_rate": 1.4000000000000001e-05,
"loss": 2.5741,
"step": 7
},
{
"epoch": 0.005941329372447085,
"grad_norm": 2.562527656555176,
"learning_rate": 1.6000000000000003e-05,
"loss": 2.7273,
"step": 8
},
{
"epoch": 0.006683995544002971,
"grad_norm": 2.70515775680542,
"learning_rate": 1.8e-05,
"loss": 2.9015,
"step": 9
},
{
"epoch": 0.007426661715558856,
"grad_norm": 2.6423349380493164,
"learning_rate": 2e-05,
"loss": 2.9069,
"step": 10
},
{
"epoch": 0.008169327887114742,
"grad_norm": 2.7331981658935547,
"learning_rate": 2.2000000000000003e-05,
"loss": 2.8755,
"step": 11
},
{
"epoch": 0.008911994058670627,
"grad_norm": 4.324583053588867,
"learning_rate": 2.4e-05,
"loss": 3.0139,
"step": 12
},
{
"epoch": 0.009654660230226514,
"grad_norm": 3.278507947921753,
"learning_rate": 2.6000000000000002e-05,
"loss": 3.1708,
"step": 13
},
{
"epoch": 0.0103973264017824,
"grad_norm": 3.9470252990722656,
"learning_rate": 2.8000000000000003e-05,
"loss": 3.2871,
"step": 14
},
{
"epoch": 0.011139992573338284,
"grad_norm": 4.76015567779541,
"learning_rate": 3e-05,
"loss": 2.9986,
"step": 15
},
{
"epoch": 0.01188265874489417,
"grad_norm": 6.300633907318115,
"learning_rate": 3.2000000000000005e-05,
"loss": 3.4458,
"step": 16
},
{
"epoch": 0.012625324916450055,
"grad_norm": 4.9842095375061035,
"learning_rate": 3.4000000000000007e-05,
"loss": 3.2977,
"step": 17
},
{
"epoch": 0.013367991088005942,
"grad_norm": 4.979720592498779,
"learning_rate": 3.6e-05,
"loss": 3.285,
"step": 18
},
{
"epoch": 0.014110657259561827,
"grad_norm": 5.599627494812012,
"learning_rate": 3.8e-05,
"loss": 3.0219,
"step": 19
},
{
"epoch": 0.014853323431117713,
"grad_norm": 5.2525153160095215,
"learning_rate": 4e-05,
"loss": 2.9258,
"step": 20
},
{
"epoch": 0.015595989602673598,
"grad_norm": 5.435408115386963,
"learning_rate": 4.2e-05,
"loss": 2.7997,
"step": 21
},
{
"epoch": 0.016338655774229483,
"grad_norm": 7.129375457763672,
"learning_rate": 4.4000000000000006e-05,
"loss": 2.8325,
"step": 22
},
{
"epoch": 0.01708132194578537,
"grad_norm": 6.04606819152832,
"learning_rate": 4.600000000000001e-05,
"loss": 2.4109,
"step": 23
},
{
"epoch": 0.017823988117341254,
"grad_norm": 7.30807638168335,
"learning_rate": 4.8e-05,
"loss": 2.5344,
"step": 24
},
{
"epoch": 0.01856665428889714,
"grad_norm": 6.1737775802612305,
"learning_rate": 5e-05,
"loss": 2.2762,
"step": 25
},
{
"epoch": 0.019309320460453028,
"grad_norm": 1.2856624126434326,
"learning_rate": 5.2000000000000004e-05,
"loss": 0.4641,
"step": 26
},
{
"epoch": 0.02005198663200891,
"grad_norm": 1.5805637836456299,
"learning_rate": 5.4000000000000005e-05,
"loss": 0.4788,
"step": 27
},
{
"epoch": 0.0207946528035648,
"grad_norm": 1.7802543640136719,
"learning_rate": 5.6000000000000006e-05,
"loss": 0.4178,
"step": 28
},
{
"epoch": 0.021537318975120682,
"grad_norm": 1.6685826778411865,
"learning_rate": 5.8e-05,
"loss": 0.3613,
"step": 29
},
{
"epoch": 0.02227998514667657,
"grad_norm": 1.2964603900909424,
"learning_rate": 6e-05,
"loss": 0.2742,
"step": 30
},
{
"epoch": 0.023022651318232456,
"grad_norm": 1.162868618965149,
"learning_rate": 6.2e-05,
"loss": 0.1826,
"step": 31
},
{
"epoch": 0.02376531748978834,
"grad_norm": 1.0783743858337402,
"learning_rate": 6.400000000000001e-05,
"loss": 0.1383,
"step": 32
},
{
"epoch": 0.024507983661344226,
"grad_norm": 0.7907594442367554,
"learning_rate": 6.6e-05,
"loss": 0.0578,
"step": 33
},
{
"epoch": 0.02525064983290011,
"grad_norm": 0.45891255140304565,
"learning_rate": 6.800000000000001e-05,
"loss": 0.0416,
"step": 34
},
{
"epoch": 0.025993316004455997,
"grad_norm": 0.2138753980398178,
"learning_rate": 7e-05,
"loss": 0.0117,
"step": 35
},
{
"epoch": 0.026735982176011884,
"grad_norm": 0.09297079592943192,
"learning_rate": 7.2e-05,
"loss": 0.0046,
"step": 36
},
{
"epoch": 0.027478648347567768,
"grad_norm": 0.27470022439956665,
"learning_rate": 7.4e-05,
"loss": 0.0102,
"step": 37
},
{
"epoch": 0.028221314519123655,
"grad_norm": 0.05123934894800186,
"learning_rate": 7.6e-05,
"loss": 0.0022,
"step": 38
},
{
"epoch": 0.028963980690679538,
"grad_norm": 0.22415509819984436,
"learning_rate": 7.800000000000001e-05,
"loss": 0.0058,
"step": 39
},
{
"epoch": 0.029706646862235425,
"grad_norm": 1.0602823495864868,
"learning_rate": 8e-05,
"loss": 0.0308,
"step": 40
},
{
"epoch": 0.030449313033791312,
"grad_norm": 0.019605087116360664,
"learning_rate": 8.2e-05,
"loss": 0.0008,
"step": 41
},
{
"epoch": 0.031191979205347196,
"grad_norm": 0.020324615761637688,
"learning_rate": 8.4e-05,
"loss": 0.001,
"step": 42
},
{
"epoch": 0.03193464537690308,
"grad_norm": 0.0388132706284523,
"learning_rate": 8.6e-05,
"loss": 0.0009,
"step": 43
},
{
"epoch": 0.032677311548458966,
"grad_norm": 0.11963042616844177,
"learning_rate": 8.800000000000001e-05,
"loss": 0.0016,
"step": 44
},
{
"epoch": 0.03341997772001486,
"grad_norm": 0.04200240597128868,
"learning_rate": 9e-05,
"loss": 0.0007,
"step": 45
},
{
"epoch": 0.03416264389157074,
"grad_norm": 1.10586416721344,
"learning_rate": 9.200000000000001e-05,
"loss": 0.0202,
"step": 46
},
{
"epoch": 0.034905310063126624,
"grad_norm": 0.007396018132567406,
"learning_rate": 9.4e-05,
"loss": 0.0004,
"step": 47
},
{
"epoch": 0.03564797623468251,
"grad_norm": 0.012978550978004932,
"learning_rate": 9.6e-05,
"loss": 0.0005,
"step": 48
},
{
"epoch": 0.0363906424062384,
"grad_norm": 0.006396067328751087,
"learning_rate": 9.8e-05,
"loss": 0.0002,
"step": 49
},
{
"epoch": 0.03713330857779428,
"grad_norm": 0.024195007979869843,
"learning_rate": 0.0001,
"loss": 0.0007,
"step": 50
},
{
"epoch": 0.037875974749350165,
"grad_norm": 1.803991436958313,
"learning_rate": 9.999985309738107e-05,
"loss": 0.0337,
"step": 51
},
{
"epoch": 0.038618640920906055,
"grad_norm": 3.9776086807250977,
"learning_rate": 9.999941239038748e-05,
"loss": 0.0281,
"step": 52
},
{
"epoch": 0.03936130709246194,
"grad_norm": 0.039939988404512405,
"learning_rate": 9.999867788160888e-05,
"loss": 0.0005,
"step": 53
},
{
"epoch": 0.04010397326401782,
"grad_norm": 2.4416584968566895,
"learning_rate": 9.999764957536131e-05,
"loss": 0.0216,
"step": 54
},
{
"epoch": 0.04084663943557371,
"grad_norm": 0.005858874414116144,
"learning_rate": 9.999632747768722e-05,
"loss": 0.0003,
"step": 55
},
{
"epoch": 0.0415893056071296,
"grad_norm": 0.004351081792265177,
"learning_rate": 9.999471159635539e-05,
"loss": 0.0003,
"step": 56
},
{
"epoch": 0.04233197177868548,
"grad_norm": 0.37215837836265564,
"learning_rate": 9.999280194086089e-05,
"loss": 0.0037,
"step": 57
},
{
"epoch": 0.043074637950241364,
"grad_norm": 0.005829016678035259,
"learning_rate": 9.999059852242507e-05,
"loss": 0.0004,
"step": 58
},
{
"epoch": 0.043817304121797254,
"grad_norm": 0.11440680176019669,
"learning_rate": 9.998810135399546e-05,
"loss": 0.0011,
"step": 59
},
{
"epoch": 0.04455997029335314,
"grad_norm": 0.023830680176615715,
"learning_rate": 9.998531045024566e-05,
"loss": 0.0006,
"step": 60
},
{
"epoch": 0.04530263646490902,
"grad_norm": 5.839324474334717,
"learning_rate": 9.998222582757533e-05,
"loss": 0.0621,
"step": 61
},
{
"epoch": 0.04604530263646491,
"grad_norm": 0.17139233648777008,
"learning_rate": 9.997884750411005e-05,
"loss": 0.0021,
"step": 62
},
{
"epoch": 0.046787968808020795,
"grad_norm": 0.030187880620360374,
"learning_rate": 9.997517549970115e-05,
"loss": 0.0007,
"step": 63
},
{
"epoch": 0.04753063497957668,
"grad_norm": 0.015891535207629204,
"learning_rate": 9.997120983592574e-05,
"loss": 0.0004,
"step": 64
},
{
"epoch": 0.04827330115113257,
"grad_norm": 0.001739410450682044,
"learning_rate": 9.996695053608651e-05,
"loss": 0.0002,
"step": 65
},
{
"epoch": 0.04901596732268845,
"grad_norm": 0.010511704720556736,
"learning_rate": 9.996239762521151e-05,
"loss": 0.0003,
"step": 66
},
{
"epoch": 0.049758633494244336,
"grad_norm": 0.0017766759265214205,
"learning_rate": 9.995755113005414e-05,
"loss": 0.0002,
"step": 67
},
{
"epoch": 0.05050129966580022,
"grad_norm": 0.004469791427254677,
"learning_rate": 9.99524110790929e-05,
"loss": 0.0003,
"step": 68
},
{
"epoch": 0.05124396583735611,
"grad_norm": 0.0036557905841618776,
"learning_rate": 9.994697750253127e-05,
"loss": 0.0004,
"step": 69
},
{
"epoch": 0.051986632008911994,
"grad_norm": 0.0024416742380708456,
"learning_rate": 9.994125043229752e-05,
"loss": 0.0003,
"step": 70
},
{
"epoch": 0.05272929818046788,
"grad_norm": 0.015205912292003632,
"learning_rate": 9.993522990204453e-05,
"loss": 0.0004,
"step": 71
},
{
"epoch": 0.05347196435202377,
"grad_norm": 0.014475185424089432,
"learning_rate": 9.992891594714954e-05,
"loss": 0.0005,
"step": 72
},
{
"epoch": 0.05421463052357965,
"grad_norm": 0.007128004450351,
"learning_rate": 9.992230860471402e-05,
"loss": 0.0004,
"step": 73
},
{
"epoch": 0.054957296695135535,
"grad_norm": 0.014512602239847183,
"learning_rate": 9.991540791356342e-05,
"loss": 0.0004,
"step": 74
},
{
"epoch": 0.055699962866691426,
"grad_norm": 0.006873926613479853,
"learning_rate": 9.990821391424689e-05,
"loss": 0.0003,
"step": 75
},
{
"epoch": 0.05644262903824731,
"grad_norm": 0.4753952920436859,
"learning_rate": 9.990072664903717e-05,
"loss": 0.0152,
"step": 76
},
{
"epoch": 0.05718529520980319,
"grad_norm": 0.006181332748383284,
"learning_rate": 9.989294616193017e-05,
"loss": 0.0004,
"step": 77
},
{
"epoch": 0.057927961381359076,
"grad_norm": 0.005821248050779104,
"learning_rate": 9.988487249864489e-05,
"loss": 0.0004,
"step": 78
},
{
"epoch": 0.05867062755291497,
"grad_norm": 0.005324830766767263,
"learning_rate": 9.9876505706623e-05,
"loss": 0.0003,
"step": 79
},
{
"epoch": 0.05941329372447085,
"grad_norm": 0.0035147082526236773,
"learning_rate": 9.986784583502862e-05,
"loss": 0.0003,
"step": 80
},
{
"epoch": 0.060155959896026734,
"grad_norm": 0.011637026444077492,
"learning_rate": 9.98588929347481e-05,
"loss": 0.0006,
"step": 81
},
{
"epoch": 0.060898626067582624,
"grad_norm": 0.006236548535525799,
"learning_rate": 9.98496470583896e-05,
"loss": 0.0006,
"step": 82
},
{
"epoch": 0.06164129223913851,
"grad_norm": 0.006345892325043678,
"learning_rate": 9.984010826028288e-05,
"loss": 0.0003,
"step": 83
},
{
"epoch": 0.06238395841069439,
"grad_norm": 0.016584008932113647,
"learning_rate": 9.98302765964789e-05,
"loss": 0.0006,
"step": 84
},
{
"epoch": 0.06312662458225028,
"grad_norm": 0.0018910560756921768,
"learning_rate": 9.982015212474955e-05,
"loss": 0.0002,
"step": 85
},
{
"epoch": 0.06386929075380617,
"grad_norm": 0.0022837521973997355,
"learning_rate": 9.980973490458728e-05,
"loss": 0.0002,
"step": 86
},
{
"epoch": 0.06461195692536205,
"grad_norm": 1.3271163702011108,
"learning_rate": 9.979902499720477e-05,
"loss": 0.0071,
"step": 87
},
{
"epoch": 0.06535462309691793,
"grad_norm": 0.011062121950089931,
"learning_rate": 9.978802246553459e-05,
"loss": 0.0003,
"step": 88
},
{
"epoch": 0.06609728926847382,
"grad_norm": 0.007717825006693602,
"learning_rate": 9.97767273742287e-05,
"loss": 0.0004,
"step": 89
},
{
"epoch": 0.06683995544002971,
"grad_norm": 0.010918798856437206,
"learning_rate": 9.976513978965829e-05,
"loss": 0.0006,
"step": 90
},
{
"epoch": 0.0675826216115856,
"grad_norm": 0.006601768545806408,
"learning_rate": 9.975325977991322e-05,
"loss": 0.0003,
"step": 91
},
{
"epoch": 0.06832528778314148,
"grad_norm": 0.009449873119592667,
"learning_rate": 9.974108741480165e-05,
"loss": 0.0004,
"step": 92
},
{
"epoch": 0.06906795395469736,
"grad_norm": 0.015310431830585003,
"learning_rate": 9.97286227658497e-05,
"loss": 0.0004,
"step": 93
},
{
"epoch": 0.06981062012625325,
"grad_norm": 0.09934096038341522,
"learning_rate": 9.971586590630093e-05,
"loss": 0.0019,
"step": 94
},
{
"epoch": 0.07055328629780913,
"grad_norm": 0.1965462863445282,
"learning_rate": 9.970281691111598e-05,
"loss": 0.0012,
"step": 95
},
{
"epoch": 0.07129595246936501,
"grad_norm": 0.05416925996541977,
"learning_rate": 9.968947585697214e-05,
"loss": 0.0007,
"step": 96
},
{
"epoch": 0.07203861864092091,
"grad_norm": 0.012609624303877354,
"learning_rate": 9.967584282226281e-05,
"loss": 0.0005,
"step": 97
},
{
"epoch": 0.0727812848124768,
"grad_norm": 0.4876333177089691,
"learning_rate": 9.966191788709716e-05,
"loss": 0.0031,
"step": 98
},
{
"epoch": 0.07352395098403268,
"grad_norm": 0.01990601420402527,
"learning_rate": 9.964770113329953e-05,
"loss": 0.0009,
"step": 99
},
{
"epoch": 0.07426661715558856,
"grad_norm": 0.00828948151320219,
"learning_rate": 9.96331926444091e-05,
"loss": 0.0004,
"step": 100
},
{
"epoch": 0.07500928332714445,
"grad_norm": 0.005279685370624065,
"learning_rate": 9.961839250567924e-05,
"loss": 0.0004,
"step": 101
},
{
"epoch": 0.07575194949870033,
"grad_norm": 1.349811315536499,
"learning_rate": 9.960330080407711e-05,
"loss": 0.1017,
"step": 102
},
{
"epoch": 0.07649461567025621,
"grad_norm": 0.006720814388245344,
"learning_rate": 9.958791762828317e-05,
"loss": 0.0005,
"step": 103
},
{
"epoch": 0.07723728184181211,
"grad_norm": 0.026319369673728943,
"learning_rate": 9.957224306869053e-05,
"loss": 0.0007,
"step": 104
},
{
"epoch": 0.077979948013368,
"grad_norm": 0.013718683272600174,
"learning_rate": 9.955627721740454e-05,
"loss": 0.0007,
"step": 105
},
{
"epoch": 0.07872261418492388,
"grad_norm": 0.030150871723890305,
"learning_rate": 9.954002016824227e-05,
"loss": 0.0008,
"step": 106
},
{
"epoch": 0.07946528035647976,
"grad_norm": 0.03413335233926773,
"learning_rate": 9.95234720167318e-05,
"loss": 0.0009,
"step": 107
},
{
"epoch": 0.08020794652803565,
"grad_norm": 0.03414030373096466,
"learning_rate": 9.950663286011179e-05,
"loss": 0.0009,
"step": 108
},
{
"epoch": 0.08095061269959153,
"grad_norm": 0.7618962526321411,
"learning_rate": 9.948950279733093e-05,
"loss": 0.0031,
"step": 109
},
{
"epoch": 0.08169327887114743,
"grad_norm": 0.01711968518793583,
"learning_rate": 9.947208192904722e-05,
"loss": 0.0005,
"step": 110
},
{
"epoch": 0.08243594504270331,
"grad_norm": 0.014231199398636818,
"learning_rate": 9.945437035762754e-05,
"loss": 0.0005,
"step": 111
},
{
"epoch": 0.0831786112142592,
"grad_norm": 0.08236388117074966,
"learning_rate": 9.943636818714695e-05,
"loss": 0.0021,
"step": 112
},
{
"epoch": 0.08392127738581508,
"grad_norm": 0.008401082828640938,
"learning_rate": 9.941807552338804e-05,
"loss": 0.0004,
"step": 113
},
{
"epoch": 0.08466394355737096,
"grad_norm": 0.00980079360306263,
"learning_rate": 9.939949247384046e-05,
"loss": 0.0005,
"step": 114
},
{
"epoch": 0.08540660972892684,
"grad_norm": 0.01246787328273058,
"learning_rate": 9.938061914770012e-05,
"loss": 0.0006,
"step": 115
},
{
"epoch": 0.08614927590048273,
"grad_norm": 0.011044768616557121,
"learning_rate": 9.936145565586871e-05,
"loss": 0.0005,
"step": 116
},
{
"epoch": 0.08689194207203862,
"grad_norm": 0.0040915291756391525,
"learning_rate": 9.934200211095288e-05,
"loss": 0.0003,
"step": 117
},
{
"epoch": 0.08763460824359451,
"grad_norm": 0.004765264689922333,
"learning_rate": 9.93222586272637e-05,
"loss": 0.0004,
"step": 118
},
{
"epoch": 0.08837727441515039,
"grad_norm": 0.0034806388430297375,
"learning_rate": 9.930222532081597e-05,
"loss": 0.0003,
"step": 119
},
{
"epoch": 0.08911994058670628,
"grad_norm": 0.005507381167262793,
"learning_rate": 9.928190230932746e-05,
"loss": 0.0003,
"step": 120
},
{
"epoch": 0.08986260675826216,
"grad_norm": 0.42908811569213867,
"learning_rate": 9.926128971221835e-05,
"loss": 0.0026,
"step": 121
},
{
"epoch": 0.09060527292981804,
"grad_norm": 0.00801977701485157,
"learning_rate": 9.924038765061042e-05,
"loss": 0.0003,
"step": 122
},
{
"epoch": 0.09134793910137393,
"grad_norm": 0.007514494471251965,
"learning_rate": 9.921919624732635e-05,
"loss": 0.0004,
"step": 123
},
{
"epoch": 0.09209060527292982,
"grad_norm": 0.006028357893228531,
"learning_rate": 9.919771562688904e-05,
"loss": 0.0002,
"step": 124
},
{
"epoch": 0.09283327144448571,
"grad_norm": 0.009683290496468544,
"learning_rate": 9.917594591552089e-05,
"loss": 0.0005,
"step": 125
},
{
"epoch": 0.09357593761604159,
"grad_norm": 0.01857328973710537,
"learning_rate": 9.915388724114301e-05,
"loss": 0.0011,
"step": 126
},
{
"epoch": 0.09431860378759747,
"grad_norm": 0.0045487345196306705,
"learning_rate": 9.913153973337446e-05,
"loss": 0.0004,
"step": 127
},
{
"epoch": 0.09506126995915336,
"grad_norm": 0.019089125096797943,
"learning_rate": 9.910890352353153e-05,
"loss": 0.0009,
"step": 128
},
{
"epoch": 0.09580393613070924,
"grad_norm": 0.01162141002714634,
"learning_rate": 9.908597874462699e-05,
"loss": 0.0004,
"step": 129
},
{
"epoch": 0.09654660230226514,
"grad_norm": 0.0069759683683514595,
"learning_rate": 9.906276553136923e-05,
"loss": 0.0005,
"step": 130
},
{
"epoch": 0.09728926847382102,
"grad_norm": 1.388071894645691,
"learning_rate": 9.903926402016153e-05,
"loss": 0.0127,
"step": 131
},
{
"epoch": 0.0980319346453769,
"grad_norm": 0.006189362611621618,
"learning_rate": 9.901547434910122e-05,
"loss": 0.0004,
"step": 132
},
{
"epoch": 0.09877460081693279,
"grad_norm": 0.0009308147127740085,
"learning_rate": 9.899139665797887e-05,
"loss": 0.0001,
"step": 133
},
{
"epoch": 0.09951726698848867,
"grad_norm": 0.003263165010139346,
"learning_rate": 9.896703108827759e-05,
"loss": 0.0002,
"step": 134
},
{
"epoch": 0.10025993316004456,
"grad_norm": 0.0018793240888044238,
"learning_rate": 9.894237778317195e-05,
"loss": 0.0002,
"step": 135
},
{
"epoch": 0.10100259933160044,
"grad_norm": 0.2546728551387787,
"learning_rate": 9.891743688752738e-05,
"loss": 0.0021,
"step": 136
},
{
"epoch": 0.10174526550315634,
"grad_norm": 0.01736506260931492,
"learning_rate": 9.88922085478992e-05,
"loss": 0.0004,
"step": 137
},
{
"epoch": 0.10248793167471222,
"grad_norm": 0.003645472228527069,
"learning_rate": 9.88666929125318e-05,
"loss": 0.0003,
"step": 138
},
{
"epoch": 0.1032305978462681,
"grad_norm": 0.002554230624809861,
"learning_rate": 9.884089013135766e-05,
"loss": 0.0002,
"step": 139
},
{
"epoch": 0.10397326401782399,
"grad_norm": 0.003567540319636464,
"learning_rate": 9.881480035599667e-05,
"loss": 0.0002,
"step": 140
},
{
"epoch": 0.10471593018937987,
"grad_norm": 0.0011336462339386344,
"learning_rate": 9.87884237397551e-05,
"loss": 0.0002,
"step": 141
},
{
"epoch": 0.10545859636093576,
"grad_norm": 0.0014857546193525195,
"learning_rate": 9.876176043762467e-05,
"loss": 0.0001,
"step": 142
},
{
"epoch": 0.10620126253249164,
"grad_norm": 0.022715391591191292,
"learning_rate": 9.873481060628174e-05,
"loss": 0.0006,
"step": 143
},
{
"epoch": 0.10694392870404754,
"grad_norm": 1.0306154489517212,
"learning_rate": 9.870757440408638e-05,
"loss": 0.0073,
"step": 144
},
{
"epoch": 0.10768659487560342,
"grad_norm": 0.002907106187194586,
"learning_rate": 9.868005199108133e-05,
"loss": 0.0002,
"step": 145
},
{
"epoch": 0.1084292610471593,
"grad_norm": 0.0016110404394567013,
"learning_rate": 9.865224352899119e-05,
"loss": 0.0002,
"step": 146
},
{
"epoch": 0.10917192721871519,
"grad_norm": 0.0025813078973442316,
"learning_rate": 9.862414918122141e-05,
"loss": 0.0002,
"step": 147
},
{
"epoch": 0.10991459339027107,
"grad_norm": 0.0018323366530239582,
"learning_rate": 9.859576911285728e-05,
"loss": 0.0002,
"step": 148
},
{
"epoch": 0.11065725956182695,
"grad_norm": 0.7255179286003113,
"learning_rate": 9.856710349066307e-05,
"loss": 0.0081,
"step": 149
},
{
"epoch": 0.11139992573338285,
"grad_norm": 3.8413281440734863,
"learning_rate": 9.853815248308101e-05,
"loss": 0.2887,
"step": 150
},
{
"epoch": 0.11214259190493873,
"grad_norm": 0.01481606438755989,
"learning_rate": 9.850891626023022e-05,
"loss": 0.0009,
"step": 151
},
{
"epoch": 0.11288525807649462,
"grad_norm": 0.005497956182807684,
"learning_rate": 9.84793949939058e-05,
"loss": 0.0004,
"step": 152
},
{
"epoch": 0.1136279242480505,
"grad_norm": 0.05774915963411331,
"learning_rate": 9.844958885757784e-05,
"loss": 0.0008,
"step": 153
},
{
"epoch": 0.11437059041960639,
"grad_norm": 0.22279635071754456,
"learning_rate": 9.84194980263903e-05,
"loss": 0.0023,
"step": 154
},
{
"epoch": 0.11511325659116227,
"grad_norm": 0.0037247207947075367,
"learning_rate": 9.838912267716005e-05,
"loss": 0.0003,
"step": 155
},
{
"epoch": 0.11585592276271815,
"grad_norm": 0.0031422695610672235,
"learning_rate": 9.835846298837584e-05,
"loss": 0.0003,
"step": 156
},
{
"epoch": 0.11659858893427405,
"grad_norm": 0.004694859962910414,
"learning_rate": 9.83275191401972e-05,
"loss": 0.0003,
"step": 157
},
{
"epoch": 0.11734125510582993,
"grad_norm": 0.009304534643888474,
"learning_rate": 9.829629131445342e-05,
"loss": 0.0004,
"step": 158
},
{
"epoch": 0.11808392127738582,
"grad_norm": 0.0023734685964882374,
"learning_rate": 9.826477969464249e-05,
"loss": 0.0003,
"step": 159
},
{
"epoch": 0.1188265874489417,
"grad_norm": 0.004084714688360691,
"learning_rate": 9.823298446592998e-05,
"loss": 0.0003,
"step": 160
},
{
"epoch": 0.11956925362049758,
"grad_norm": 0.005245341453701258,
"learning_rate": 9.820090581514797e-05,
"loss": 0.0003,
"step": 161
},
{
"epoch": 0.12031191979205347,
"grad_norm": 0.00829974003136158,
"learning_rate": 9.816854393079403e-05,
"loss": 0.0004,
"step": 162
},
{
"epoch": 0.12105458596360935,
"grad_norm": 0.07217549532651901,
"learning_rate": 9.81358990030299e-05,
"loss": 0.001,
"step": 163
},
{
"epoch": 0.12179725213516525,
"grad_norm": 0.0019713877700269222,
"learning_rate": 9.810297122368067e-05,
"loss": 0.0002,
"step": 164
},
{
"epoch": 0.12253991830672113,
"grad_norm": 0.008508375845849514,
"learning_rate": 9.806976078623337e-05,
"loss": 0.0005,
"step": 165
},
{
"epoch": 0.12328258447827702,
"grad_norm": 0.0026160285342484713,
"learning_rate": 9.803626788583603e-05,
"loss": 0.0002,
"step": 166
},
{
"epoch": 0.1240252506498329,
"grad_norm": 0.004115263931453228,
"learning_rate": 9.800249271929645e-05,
"loss": 0.0003,
"step": 167
},
{
"epoch": 0.12476791682138878,
"grad_norm": 0.009765752591192722,
"learning_rate": 9.796843548508101e-05,
"loss": 0.0004,
"step": 168
},
{
"epoch": 0.12551058299294468,
"grad_norm": 0.032703742384910583,
"learning_rate": 9.793409638331363e-05,
"loss": 0.0009,
"step": 169
},
{
"epoch": 0.12625324916450056,
"grad_norm": 0.005391250364482403,
"learning_rate": 9.789947561577445e-05,
"loss": 0.0003,
"step": 170
},
{
"epoch": 0.12699591533605645,
"grad_norm": 0.2329409420490265,
"learning_rate": 9.786457338589872e-05,
"loss": 0.0022,
"step": 171
},
{
"epoch": 0.12773858150761233,
"grad_norm": 0.006565611343830824,
"learning_rate": 9.782938989877562e-05,
"loss": 0.0003,
"step": 172
},
{
"epoch": 0.12848124767916821,
"grad_norm": 0.00805259495973587,
"learning_rate": 9.779392536114698e-05,
"loss": 0.0005,
"step": 173
},
{
"epoch": 0.1292239138507241,
"grad_norm": 0.05728701129555702,
"learning_rate": 9.775817998140616e-05,
"loss": 0.0008,
"step": 174
},
{
"epoch": 0.12996658002227998,
"grad_norm": 0.030414534732699394,
"learning_rate": 9.772215396959674e-05,
"loss": 0.0006,
"step": 175
},
{
"epoch": 0.13070924619383587,
"grad_norm": 0.003174105193465948,
"learning_rate": 9.768584753741134e-05,
"loss": 0.0003,
"step": 176
},
{
"epoch": 0.13145191236539175,
"grad_norm": 0.005474573001265526,
"learning_rate": 9.764926089819038e-05,
"loss": 0.0005,
"step": 177
},
{
"epoch": 0.13219457853694763,
"grad_norm": 0.01835116557776928,
"learning_rate": 9.761239426692077e-05,
"loss": 0.0006,
"step": 178
},
{
"epoch": 0.13293724470850352,
"grad_norm": 0.07570798695087433,
"learning_rate": 9.757524786023468e-05,
"loss": 0.0025,
"step": 179
},
{
"epoch": 0.13367991088005943,
"grad_norm": 0.0024187075905501842,
"learning_rate": 9.753782189640834e-05,
"loss": 0.0002,
"step": 180
},
{
"epoch": 0.1344225770516153,
"grad_norm": 0.014368316158652306,
"learning_rate": 9.750011659536058e-05,
"loss": 0.0006,
"step": 181
},
{
"epoch": 0.1351652432231712,
"grad_norm": 0.013278312981128693,
"learning_rate": 9.74621321786517e-05,
"loss": 0.0005,
"step": 182
},
{
"epoch": 0.13590790939472708,
"grad_norm": 0.01532573252916336,
"learning_rate": 9.742386886948213e-05,
"loss": 0.0003,
"step": 183
},
{
"epoch": 0.13665057556628296,
"grad_norm": 0.0070452457293868065,
"learning_rate": 9.738532689269112e-05,
"loss": 0.0003,
"step": 184
},
{
"epoch": 0.13739324173783884,
"grad_norm": 0.5086126923561096,
"learning_rate": 9.73465064747553e-05,
"loss": 0.0046,
"step": 185
},
{
"epoch": 0.13813590790939473,
"grad_norm": 0.00677888048812747,
"learning_rate": 9.730740784378753e-05,
"loss": 0.0004,
"step": 186
},
{
"epoch": 0.1388785740809506,
"grad_norm": 0.014153935015201569,
"learning_rate": 9.726803122953547e-05,
"loss": 0.0005,
"step": 187
},
{
"epoch": 0.1396212402525065,
"grad_norm": 0.005835146643221378,
"learning_rate": 9.722837686338025e-05,
"loss": 0.0002,
"step": 188
},
{
"epoch": 0.14036390642406238,
"grad_norm": 0.004742791876196861,
"learning_rate": 9.718844497833504e-05,
"loss": 0.0003,
"step": 189
},
{
"epoch": 0.14110657259561826,
"grad_norm": 0.002392592839896679,
"learning_rate": 9.71482358090438e-05,
"loss": 0.0002,
"step": 190
},
{
"epoch": 0.14184923876717415,
"grad_norm": 0.009971565566956997,
"learning_rate": 9.710774959177983e-05,
"loss": 0.0004,
"step": 191
},
{
"epoch": 0.14259190493873003,
"grad_norm": 0.000985423568636179,
"learning_rate": 9.706698656444438e-05,
"loss": 0.0001,
"step": 192
},
{
"epoch": 0.1433345711102859,
"grad_norm": 0.002976580522954464,
"learning_rate": 9.702594696656524e-05,
"loss": 0.0003,
"step": 193
},
{
"epoch": 0.14407723728184182,
"grad_norm": 0.004122724290937185,
"learning_rate": 9.698463103929542e-05,
"loss": 0.0003,
"step": 194
},
{
"epoch": 0.1448199034533977,
"grad_norm": 0.004222078714519739,
"learning_rate": 9.694303902541163e-05,
"loss": 0.0002,
"step": 195
},
{
"epoch": 0.1455625696249536,
"grad_norm": 0.002287927782163024,
"learning_rate": 9.69011711693129e-05,
"loss": 0.0002,
"step": 196
},
{
"epoch": 0.14630523579650948,
"grad_norm": 0.001492491108365357,
"learning_rate": 9.685902771701913e-05,
"loss": 0.0002,
"step": 197
},
{
"epoch": 0.14704790196806536,
"grad_norm": 0.00485377898439765,
"learning_rate": 9.681660891616966e-05,
"loss": 0.0003,
"step": 198
},
{
"epoch": 0.14779056813962124,
"grad_norm": 0.014361650682985783,
"learning_rate": 9.677391501602182e-05,
"loss": 0.0005,
"step": 199
},
{
"epoch": 0.14853323431117713,
"grad_norm": 1.621951699256897,
"learning_rate": 9.673094626744942e-05,
"loss": 0.0662,
"step": 200
},
{
"epoch": 0.149275900482733,
"grad_norm": 0.0026435288600623608,
"learning_rate": 9.668770292294136e-05,
"loss": 0.0002,
"step": 201
},
{
"epoch": 0.1500185666542889,
"grad_norm": 0.0024962294846773148,
"learning_rate": 9.664418523660004e-05,
"loss": 0.0002,
"step": 202
},
{
"epoch": 0.15076123282584478,
"grad_norm": 0.002211767714470625,
"learning_rate": 9.660039346413994e-05,
"loss": 0.0002,
"step": 203
},
{
"epoch": 0.15150389899740066,
"grad_norm": 0.002533277263864875,
"learning_rate": 9.65563278628861e-05,
"loss": 0.0003,
"step": 204
},
{
"epoch": 0.15224656516895654,
"grad_norm": 0.006425884552299976,
"learning_rate": 9.651198869177263e-05,
"loss": 0.0004,
"step": 205
},
{
"epoch": 0.15298923134051243,
"grad_norm": 0.0048479656688869,
"learning_rate": 9.646737621134112e-05,
"loss": 0.0003,
"step": 206
},
{
"epoch": 0.15373189751206834,
"grad_norm": 0.004739306401461363,
"learning_rate": 9.642249068373921e-05,
"loss": 0.0004,
"step": 207
},
{
"epoch": 0.15447456368362422,
"grad_norm": 0.0035690851509571075,
"learning_rate": 9.637733237271894e-05,
"loss": 0.0002,
"step": 208
},
{
"epoch": 0.1552172298551801,
"grad_norm": 0.001971122808754444,
"learning_rate": 9.633190154363527e-05,
"loss": 0.0002,
"step": 209
},
{
"epoch": 0.155959896026736,
"grad_norm": 0.0017060886602848768,
"learning_rate": 9.628619846344454e-05,
"loss": 0.0002,
"step": 210
},
{
"epoch": 0.15670256219829187,
"grad_norm": 0.011329671368002892,
"learning_rate": 9.624022340070279e-05,
"loss": 0.0003,
"step": 211
},
{
"epoch": 0.15744522836984776,
"grad_norm": 0.002570141339674592,
"learning_rate": 9.619397662556435e-05,
"loss": 0.0002,
"step": 212
},
{
"epoch": 0.15818789454140364,
"grad_norm": 0.0023831925354897976,
"learning_rate": 9.614745840978008e-05,
"loss": 0.0002,
"step": 213
},
{
"epoch": 0.15893056071295952,
"grad_norm": 0.016878001391887665,
"learning_rate": 9.610066902669592e-05,
"loss": 0.0007,
"step": 214
},
{
"epoch": 0.1596732268845154,
"grad_norm": 0.008881067857146263,
"learning_rate": 9.605360875125117e-05,
"loss": 0.0002,
"step": 215
},
{
"epoch": 0.1604158930560713,
"grad_norm": 0.0010246317833662033,
"learning_rate": 9.600627785997696e-05,
"loss": 0.0001,
"step": 216
},
{
"epoch": 0.16115855922762717,
"grad_norm": 0.0019209448946639895,
"learning_rate": 9.595867663099453e-05,
"loss": 0.0002,
"step": 217
},
{
"epoch": 0.16190122539918306,
"grad_norm": 0.0022903543431311846,
"learning_rate": 9.591080534401371e-05,
"loss": 0.0002,
"step": 218
},
{
"epoch": 0.16264389157073894,
"grad_norm": 0.009357557632029057,
"learning_rate": 9.586266428033119e-05,
"loss": 0.0003,
"step": 219
},
{
"epoch": 0.16338655774229485,
"grad_norm": 0.003212034935131669,
"learning_rate": 9.581425372282891e-05,
"loss": 0.0002,
"step": 220
},
{
"epoch": 0.16412922391385074,
"grad_norm": 0.006715564522892237,
"learning_rate": 9.576557395597236e-05,
"loss": 0.0005,
"step": 221
},
{
"epoch": 0.16487189008540662,
"grad_norm": 0.009416691958904266,
"learning_rate": 9.571662526580898e-05,
"loss": 0.0007,
"step": 222
},
{
"epoch": 0.1656145562569625,
"grad_norm": 0.005933799315243959,
"learning_rate": 9.566740793996637e-05,
"loss": 0.0003,
"step": 223
},
{
"epoch": 0.1663572224285184,
"grad_norm": 0.0062180450186133385,
"learning_rate": 9.561792226765072e-05,
"loss": 0.0004,
"step": 224
},
{
"epoch": 0.16709988860007427,
"grad_norm": 0.007876376621425152,
"learning_rate": 9.5568168539645e-05,
"loss": 0.0004,
"step": 225
},
{
"epoch": 0.16784255477163015,
"grad_norm": 0.06271418929100037,
"learning_rate": 9.551814704830734e-05,
"loss": 0.0017,
"step": 226
},
{
"epoch": 0.16858522094318604,
"grad_norm": 0.002005163347348571,
"learning_rate": 9.546785808756926e-05,
"loss": 0.0002,
"step": 227
},
{
"epoch": 0.16932788711474192,
"grad_norm": 0.022918762639164925,
"learning_rate": 9.541730195293397e-05,
"loss": 0.0004,
"step": 228
},
{
"epoch": 0.1700705532862978,
"grad_norm": 0.018031733110547066,
"learning_rate": 9.53664789414746e-05,
"loss": 0.0004,
"step": 229
},
{
"epoch": 0.1708132194578537,
"grad_norm": 0.0034406071063131094,
"learning_rate": 9.53153893518325e-05,
"loss": 0.0002,
"step": 230
},
{
"epoch": 0.17155588562940957,
"grad_norm": 0.03680207580327988,
"learning_rate": 9.526403348421544e-05,
"loss": 0.0004,
"step": 231
},
{
"epoch": 0.17229855180096545,
"grad_norm": 0.001635130844078958,
"learning_rate": 9.521241164039589e-05,
"loss": 0.0002,
"step": 232
},
{
"epoch": 0.17304121797252134,
"grad_norm": 0.012099578976631165,
"learning_rate": 9.516052412370921e-05,
"loss": 0.0004,
"step": 233
},
{
"epoch": 0.17378388414407725,
"grad_norm": 0.001356737338937819,
"learning_rate": 9.51083712390519e-05,
"loss": 0.0001,
"step": 234
},
{
"epoch": 0.17452655031563313,
"grad_norm": 0.003389824880287051,
"learning_rate": 9.505595329287972e-05,
"loss": 0.0001,
"step": 235
},
{
"epoch": 0.17526921648718902,
"grad_norm": 0.004536811728030443,
"learning_rate": 9.500327059320606e-05,
"loss": 0.0003,
"step": 236
},
{
"epoch": 0.1760118826587449,
"grad_norm": 0.2518163323402405,
"learning_rate": 9.495032344959998e-05,
"loss": 0.0022,
"step": 237
},
{
"epoch": 0.17675454883030078,
"grad_norm": 0.0022397220600396395,
"learning_rate": 9.48971121731844e-05,
"loss": 0.0002,
"step": 238
},
{
"epoch": 0.17749721500185667,
"grad_norm": 0.0010416822042316198,
"learning_rate": 9.484363707663442e-05,
"loss": 0.0001,
"step": 239
},
{
"epoch": 0.17823988117341255,
"grad_norm": 0.0013076276518404484,
"learning_rate": 9.478989847417526e-05,
"loss": 0.0001,
"step": 240
},
{
"epoch": 0.17898254734496843,
"grad_norm": 0.0012382504064589739,
"learning_rate": 9.473589668158061e-05,
"loss": 0.0002,
"step": 241
},
{
"epoch": 0.17972521351652432,
"grad_norm": 0.0214553065598011,
"learning_rate": 9.468163201617062e-05,
"loss": 0.0006,
"step": 242
},
{
"epoch": 0.1804678796880802,
"grad_norm": 0.002527498174458742,
"learning_rate": 9.462710479681019e-05,
"loss": 0.0002,
"step": 243
},
{
"epoch": 0.18121054585963609,
"grad_norm": 0.011280801147222519,
"learning_rate": 9.457231534390694e-05,
"loss": 0.0003,
"step": 244
},
{
"epoch": 0.18195321203119197,
"grad_norm": 0.0018357493681833148,
"learning_rate": 9.451726397940945e-05,
"loss": 0.0002,
"step": 245
},
{
"epoch": 0.18269587820274785,
"grad_norm": 0.08694746345281601,
"learning_rate": 9.446195102680531e-05,
"loss": 0.0009,
"step": 246
},
{
"epoch": 0.18343854437430376,
"grad_norm": 0.04296145588159561,
"learning_rate": 9.440637681111922e-05,
"loss": 0.0006,
"step": 247
},
{
"epoch": 0.18418121054585965,
"grad_norm": 0.006709881592541933,
"learning_rate": 9.435054165891109e-05,
"loss": 0.0004,
"step": 248
},
{
"epoch": 0.18492387671741553,
"grad_norm": 0.003305165795609355,
"learning_rate": 9.429444589827412e-05,
"loss": 0.0002,
"step": 249
},
{
"epoch": 0.18566654288897141,
"grad_norm": 0.0106744933873415,
"learning_rate": 9.423808985883289e-05,
"loss": 0.0004,
"step": 250
},
{
"epoch": 0.1864092090605273,
"grad_norm": 0.2720184326171875,
"learning_rate": 9.418147387174139e-05,
"loss": 0.0074,
"step": 251
},
{
"epoch": 0.18715187523208318,
"grad_norm": 0.005303042940795422,
"learning_rate": 9.412459826968108e-05,
"loss": 0.0003,
"step": 252
},
{
"epoch": 0.18789454140363906,
"grad_norm": 0.0023732127156108618,
"learning_rate": 9.406746338685895e-05,
"loss": 0.0001,
"step": 253
},
{
"epoch": 0.18863720757519495,
"grad_norm": 0.006592648569494486,
"learning_rate": 9.401006955900556e-05,
"loss": 0.0002,
"step": 254
},
{
"epoch": 0.18937987374675083,
"grad_norm": 0.0008024009293876588,
"learning_rate": 9.395241712337307e-05,
"loss": 0.0001,
"step": 255
},
{
"epoch": 0.19012253991830672,
"grad_norm": 0.002137925708666444,
"learning_rate": 9.389450641873323e-05,
"loss": 0.0002,
"step": 256
},
{
"epoch": 0.1908652060898626,
"grad_norm": 0.005445448216050863,
"learning_rate": 9.38363377853754e-05,
"loss": 0.0002,
"step": 257
},
{
"epoch": 0.19160787226141848,
"grad_norm": 0.0010594564955681562,
"learning_rate": 9.377791156510455e-05,
"loss": 0.0001,
"step": 258
},
{
"epoch": 0.19235053843297437,
"grad_norm": 0.0036199286114424467,
"learning_rate": 9.371922810123929e-05,
"loss": 0.0002,
"step": 259
},
{
"epoch": 0.19309320460453028,
"grad_norm": 0.0007854366558603942,
"learning_rate": 9.36602877386098e-05,
"loss": 0.0001,
"step": 260
},
{
"epoch": 0.19383587077608616,
"grad_norm": 0.0012193581787869334,
"learning_rate": 9.360109082355582e-05,
"loss": 0.0001,
"step": 261
},
{
"epoch": 0.19457853694764204,
"grad_norm": 0.009622081182897091,
"learning_rate": 9.354163770392461e-05,
"loss": 0.0005,
"step": 262
},
{
"epoch": 0.19532120311919793,
"grad_norm": 0.0011145096505060792,
"learning_rate": 9.348192872906896e-05,
"loss": 0.0001,
"step": 263
},
{
"epoch": 0.1960638692907538,
"grad_norm": 0.004288391210138798,
"learning_rate": 9.342196424984504e-05,
"loss": 0.0001,
"step": 264
},
{
"epoch": 0.1968065354623097,
"grad_norm": 0.0054808189161121845,
"learning_rate": 9.33617446186104e-05,
"loss": 0.0003,
"step": 265
},
{
"epoch": 0.19754920163386558,
"grad_norm": 0.0028313531074672937,
"learning_rate": 9.330127018922194e-05,
"loss": 0.0002,
"step": 266
},
{
"epoch": 0.19829186780542146,
"grad_norm": 0.008807774633169174,
"learning_rate": 9.324054131703371e-05,
"loss": 0.0004,
"step": 267
},
{
"epoch": 0.19903453397697735,
"grad_norm": 0.008937092497944832,
"learning_rate": 9.317955835889494e-05,
"loss": 0.0002,
"step": 268
},
{
"epoch": 0.19977720014853323,
"grad_norm": 0.001885921461507678,
"learning_rate": 9.311832167314787e-05,
"loss": 0.0002,
"step": 269
},
{
"epoch": 0.2005198663200891,
"grad_norm": 0.0014201418962329626,
"learning_rate": 9.305683161962569e-05,
"loss": 0.0001,
"step": 270
},
{
"epoch": 0.201262532491645,
"grad_norm": 0.013191280886530876,
"learning_rate": 9.299508855965039e-05,
"loss": 0.0003,
"step": 271
},
{
"epoch": 0.20200519866320088,
"grad_norm": 1.5947636365890503,
"learning_rate": 9.293309285603067e-05,
"loss": 0.153,
"step": 272
},
{
"epoch": 0.20274786483475676,
"grad_norm": 0.005475195590406656,
"learning_rate": 9.287084487305975e-05,
"loss": 0.0002,
"step": 273
},
{
"epoch": 0.20349053100631267,
"grad_norm": 0.0020285584032535553,
"learning_rate": 9.280834497651334e-05,
"loss": 0.0002,
"step": 274
},
{
"epoch": 0.20423319717786856,
"grad_norm": 0.0018257065676152706,
"learning_rate": 9.274559353364734e-05,
"loss": 0.0001,
"step": 275
},
{
"epoch": 0.20497586334942444,
"grad_norm": 0.01402269210666418,
"learning_rate": 9.268259091319582e-05,
"loss": 0.0006,
"step": 276
},
{
"epoch": 0.20571852952098033,
"grad_norm": 0.05442607030272484,
"learning_rate": 9.261933748536878e-05,
"loss": 0.0006,
"step": 277
},
{
"epoch": 0.2064611956925362,
"grad_norm": 0.002209064783528447,
"learning_rate": 9.255583362184999e-05,
"loss": 0.0002,
"step": 278
},
{
"epoch": 0.2072038618640921,
"grad_norm": 0.019024129956960678,
"learning_rate": 9.24920796957948e-05,
"loss": 0.0003,
"step": 279
},
{
"epoch": 0.20794652803564798,
"grad_norm": 0.008843375369906425,
"learning_rate": 9.242807608182795e-05,
"loss": 0.0004,
"step": 280
},
{
"epoch": 0.20868919420720386,
"grad_norm": 0.11764897406101227,
"learning_rate": 9.23638231560414e-05,
"loss": 0.0015,
"step": 281
},
{
"epoch": 0.20943186037875974,
"grad_norm": 0.004572493955492973,
"learning_rate": 9.229932129599205e-05,
"loss": 0.0002,
"step": 282
},
{
"epoch": 0.21017452655031563,
"grad_norm": 0.006193062756210566,
"learning_rate": 9.223457088069962e-05,
"loss": 0.0003,
"step": 283
},
{
"epoch": 0.2109171927218715,
"grad_norm": 0.028236044570803642,
"learning_rate": 9.21695722906443e-05,
"loss": 0.0006,
"step": 284
},
{
"epoch": 0.2116598588934274,
"grad_norm": 0.012708684429526329,
"learning_rate": 9.210432590776461e-05,
"loss": 0.0008,
"step": 285
},
{
"epoch": 0.21240252506498328,
"grad_norm": 0.02218322828412056,
"learning_rate": 9.203883211545517e-05,
"loss": 0.0008,
"step": 286
},
{
"epoch": 0.2131451912365392,
"grad_norm": 0.015315636061131954,
"learning_rate": 9.197309129856433e-05,
"loss": 0.0005,
"step": 287
},
{
"epoch": 0.21388785740809507,
"grad_norm": 0.011535655707120895,
"learning_rate": 9.190710384339203e-05,
"loss": 0.0005,
"step": 288
},
{
"epoch": 0.21463052357965096,
"grad_norm": 0.013051263056695461,
"learning_rate": 9.184087013768745e-05,
"loss": 0.0003,
"step": 289
},
{
"epoch": 0.21537318975120684,
"grad_norm": 0.003687142627313733,
"learning_rate": 9.177439057064683e-05,
"loss": 0.0003,
"step": 290
},
{
"epoch": 0.21611585592276272,
"grad_norm": 0.007237662561237812,
"learning_rate": 9.170766553291103e-05,
"loss": 0.0003,
"step": 291
},
{
"epoch": 0.2168585220943186,
"grad_norm": 0.007928196340799332,
"learning_rate": 9.164069541656337e-05,
"loss": 0.0004,
"step": 292
},
{
"epoch": 0.2176011882658745,
"grad_norm": 0.04276131093502045,
"learning_rate": 9.157348061512727e-05,
"loss": 0.0005,
"step": 293
},
{
"epoch": 0.21834385443743037,
"grad_norm": 0.029039736837148666,
"learning_rate": 9.150602152356395e-05,
"loss": 0.0006,
"step": 294
},
{
"epoch": 0.21908652060898626,
"grad_norm": 0.08352446556091309,
"learning_rate": 9.143831853827009e-05,
"loss": 0.0015,
"step": 295
},
{
"epoch": 0.21982918678054214,
"grad_norm": 0.12498286366462708,
"learning_rate": 9.137037205707552e-05,
"loss": 0.0008,
"step": 296
},
{
"epoch": 0.22057185295209802,
"grad_norm": 0.012141775339841843,
"learning_rate": 9.130218247924092e-05,
"loss": 0.0005,
"step": 297
},
{
"epoch": 0.2213145191236539,
"grad_norm": 0.007531680166721344,
"learning_rate": 9.123375020545535e-05,
"loss": 0.0003,
"step": 298
},
{
"epoch": 0.2220571852952098,
"grad_norm": 0.009885065257549286,
"learning_rate": 9.116507563783403e-05,
"loss": 0.0004,
"step": 299
},
{
"epoch": 0.2227998514667657,
"grad_norm": 0.26482439041137695,
"learning_rate": 9.109615917991591e-05,
"loss": 0.0014,
"step": 300
},
{
"epoch": 0.2235425176383216,
"grad_norm": 0.00270358519628644,
"learning_rate": 9.102700123666132e-05,
"loss": 0.0002,
"step": 301
},
{
"epoch": 0.22428518380987747,
"grad_norm": 1.3726491928100586,
"learning_rate": 9.09576022144496e-05,
"loss": 0.0146,
"step": 302
},
{
"epoch": 0.22502784998143335,
"grad_norm": 0.2558269500732422,
"learning_rate": 9.088796252107665e-05,
"loss": 0.0018,
"step": 303
},
{
"epoch": 0.22577051615298924,
"grad_norm": 0.7685271501541138,
"learning_rate": 9.08180825657526e-05,
"loss": 0.0064,
"step": 304
},
{
"epoch": 0.22651318232454512,
"grad_norm": 0.010196542367339134,
"learning_rate": 9.07479627590994e-05,
"loss": 0.0004,
"step": 305
},
{
"epoch": 0.227255848496101,
"grad_norm": 0.0029280134476721287,
"learning_rate": 9.067760351314838e-05,
"loss": 0.0002,
"step": 306
},
{
"epoch": 0.2279985146676569,
"grad_norm": 0.0070107802748680115,
"learning_rate": 9.060700524133785e-05,
"loss": 0.0003,
"step": 307
},
{
"epoch": 0.22874118083921277,
"grad_norm": 0.014984749257564545,
"learning_rate": 9.053616835851062e-05,
"loss": 0.0003,
"step": 308
},
{
"epoch": 0.22948384701076865,
"grad_norm": 0.0027229287661612034,
"learning_rate": 9.046509328091166e-05,
"loss": 0.0002,
"step": 309
},
{
"epoch": 0.23022651318232454,
"grad_norm": 0.006428726948797703,
"learning_rate": 9.039378042618556e-05,
"loss": 0.0003,
"step": 310
},
{
"epoch": 0.23096917935388042,
"grad_norm": 0.007419643457978964,
"learning_rate": 9.032223021337414e-05,
"loss": 0.0004,
"step": 311
},
{
"epoch": 0.2317118455254363,
"grad_norm": 0.0074731167405843735,
"learning_rate": 9.025044306291392e-05,
"loss": 0.0002,
"step": 312
},
{
"epoch": 0.2324545116969922,
"grad_norm": 0.0038742341566830873,
"learning_rate": 9.017841939663374e-05,
"loss": 0.0002,
"step": 313
},
{
"epoch": 0.2331971778685481,
"grad_norm": 0.010770438238978386,
"learning_rate": 9.01061596377522e-05,
"loss": 0.0004,
"step": 314
},
{
"epoch": 0.23393984404010398,
"grad_norm": 0.009036507457494736,
"learning_rate": 9.003366421087521e-05,
"loss": 0.0003,
"step": 315
},
{
"epoch": 0.23468251021165987,
"grad_norm": 0.008864130824804306,
"learning_rate": 8.996093354199349e-05,
"loss": 0.0003,
"step": 316
},
{
"epoch": 0.23542517638321575,
"grad_norm": 0.00652578379958868,
"learning_rate": 8.988796805848007e-05,
"loss": 0.0003,
"step": 317
},
{
"epoch": 0.23616784255477163,
"grad_norm": 0.006959845311939716,
"learning_rate": 8.981476818908778e-05,
"loss": 0.0002,
"step": 318
},
{
"epoch": 0.23691050872632752,
"grad_norm": 0.0059072221629321575,
"learning_rate": 8.974133436394673e-05,
"loss": 0.0003,
"step": 319
},
{
"epoch": 0.2376531748978834,
"grad_norm": 0.005236865486949682,
"learning_rate": 8.966766701456177e-05,
"loss": 0.0003,
"step": 320
},
{
"epoch": 0.23839584106943928,
"grad_norm": 0.01116922963410616,
"learning_rate": 8.959376657380993e-05,
"loss": 0.0003,
"step": 321
},
{
"epoch": 0.23913850724099517,
"grad_norm": 0.02754572220146656,
"learning_rate": 8.951963347593797e-05,
"loss": 0.0011,
"step": 322
},
{
"epoch": 0.23988117341255105,
"grad_norm": 0.05620993301272392,
"learning_rate": 8.944526815655974e-05,
"loss": 0.0008,
"step": 323
},
{
"epoch": 0.24062383958410694,
"grad_norm": 0.013477266766130924,
"learning_rate": 8.937067105265362e-05,
"loss": 0.0005,
"step": 324
},
{
"epoch": 0.24136650575566282,
"grad_norm": 0.008718527853488922,
"learning_rate": 8.929584260256004e-05,
"loss": 0.0002,
"step": 325
},
{
"epoch": 0.2421091719272187,
"grad_norm": 0.0029952102340757847,
"learning_rate": 8.922078324597879e-05,
"loss": 0.0002,
"step": 326
},
{
"epoch": 0.24285183809877461,
"grad_norm": 0.19847828149795532,
"learning_rate": 8.914549342396652e-05,
"loss": 0.0012,
"step": 327
},
{
"epoch": 0.2435945042703305,
"grad_norm": 0.0039433506317436695,
"learning_rate": 8.906997357893412e-05,
"loss": 0.0002,
"step": 328
},
{
"epoch": 0.24433717044188638,
"grad_norm": 0.0013001116458326578,
"learning_rate": 8.899422415464409e-05,
"loss": 0.0001,
"step": 329
},
{
"epoch": 0.24507983661344226,
"grad_norm": 0.029752757400274277,
"learning_rate": 8.891824559620801e-05,
"loss": 0.0008,
"step": 330
},
{
"epoch": 0.24582250278499815,
"grad_norm": 0.009903517551720142,
"learning_rate": 8.884203835008382e-05,
"loss": 0.0003,
"step": 331
},
{
"epoch": 0.24656516895655403,
"grad_norm": 0.0017488920129835606,
"learning_rate": 8.87656028640733e-05,
"loss": 0.0002,
"step": 332
},
{
"epoch": 0.24730783512810992,
"grad_norm": 0.001140685984864831,
"learning_rate": 8.868893958731937e-05,
"loss": 0.0001,
"step": 333
},
{
"epoch": 0.2480505012996658,
"grad_norm": 0.001769506954587996,
"learning_rate": 8.861204897030346e-05,
"loss": 0.0002,
"step": 334
},
{
"epoch": 0.24879316747122168,
"grad_norm": 0.034780845046043396,
"learning_rate": 8.853493146484291e-05,
"loss": 0.0002,
"step": 335
},
{
"epoch": 0.24953583364277757,
"grad_norm": 0.0031120367348194122,
"learning_rate": 8.845758752408826e-05,
"loss": 0.0002,
"step": 336
},
{
"epoch": 0.2502784998143335,
"grad_norm": 0.0008751750574447215,
"learning_rate": 8.838001760252059e-05,
"loss": 0.0001,
"step": 337
},
{
"epoch": 0.2502784998143335,
"eval_loss": 8.811052975943312e-05,
"eval_runtime": 190.5265,
"eval_samples_per_second": 5.952,
"eval_steps_per_second": 2.976,
"step": 337
},
{
"epoch": 0.25102116598588936,
"grad_norm": 0.001558976829983294,
"learning_rate": 8.83022221559489e-05,
"loss": 0.0001,
"step": 338
},
{
"epoch": 0.25176383215744524,
"grad_norm": 0.0019584286492317915,
"learning_rate": 8.822420164150739e-05,
"loss": 0.0002,
"step": 339
},
{
"epoch": 0.25250649832900113,
"grad_norm": 0.0017735590226948261,
"learning_rate": 8.814595651765277e-05,
"loss": 0.0002,
"step": 340
},
{
"epoch": 0.253249164500557,
"grad_norm": 0.0010270827915519476,
"learning_rate": 8.806748724416156e-05,
"loss": 0.0001,
"step": 341
},
{
"epoch": 0.2539918306721129,
"grad_norm": 0.0006894596735946834,
"learning_rate": 8.798879428212747e-05,
"loss": 0.0001,
"step": 342
},
{
"epoch": 0.2547344968436688,
"grad_norm": 0.8450746536254883,
"learning_rate": 8.790987809395856e-05,
"loss": 0.0079,
"step": 343
},
{
"epoch": 0.25547716301522466,
"grad_norm": 0.0008732525166124105,
"learning_rate": 8.783073914337466e-05,
"loss": 0.0001,
"step": 344
},
{
"epoch": 0.25621982918678055,
"grad_norm": 0.001687792013399303,
"learning_rate": 8.775137789540446e-05,
"loss": 0.0002,
"step": 345
},
{
"epoch": 0.25696249535833643,
"grad_norm": 0.0013438455061987042,
"learning_rate": 8.767179481638303e-05,
"loss": 0.0002,
"step": 346
},
{
"epoch": 0.2577051615298923,
"grad_norm": 0.0013538196217268705,
"learning_rate": 8.759199037394887e-05,
"loss": 0.0001,
"step": 347
},
{
"epoch": 0.2584478277014482,
"grad_norm": 0.0015409106854349375,
"learning_rate": 8.751196503704123e-05,
"loss": 0.0001,
"step": 348
},
{
"epoch": 0.2591904938730041,
"grad_norm": 0.006490611936897039,
"learning_rate": 8.743171927589737e-05,
"loss": 0.0002,
"step": 349
},
{
"epoch": 0.25993316004455996,
"grad_norm": 0.0011190741788595915,
"learning_rate": 8.73512535620498e-05,
"loss": 0.0001,
"step": 350
},
{
"epoch": 0.26067582621611585,
"grad_norm": 0.7256274819374084,
"learning_rate": 8.727056836832348e-05,
"loss": 0.0483,
"step": 351
},
{
"epoch": 0.26141849238767173,
"grad_norm": 0.0036242681089788675,
"learning_rate": 8.718966416883306e-05,
"loss": 0.0002,
"step": 352
},
{
"epoch": 0.2621611585592276,
"grad_norm": 0.002452226122841239,
"learning_rate": 8.710854143898008e-05,
"loss": 0.0002,
"step": 353
},
{
"epoch": 0.2629038247307835,
"grad_norm": 0.024639679118990898,
"learning_rate": 8.702720065545024e-05,
"loss": 0.0002,
"step": 354
},
{
"epoch": 0.2636464909023394,
"grad_norm": 0.0007104252581484616,
"learning_rate": 8.694564229621046e-05,
"loss": 0.0001,
"step": 355
},
{
"epoch": 0.26438915707389526,
"grad_norm": 0.0009827159810811281,
"learning_rate": 8.68638668405062e-05,
"loss": 0.0002,
"step": 356
},
{
"epoch": 0.26513182324545115,
"grad_norm": 0.014522520825266838,
"learning_rate": 8.678187476885864e-05,
"loss": 0.0003,
"step": 357
},
{
"epoch": 0.26587448941700703,
"grad_norm": 0.004931020084768534,
"learning_rate": 8.669966656306176e-05,
"loss": 0.0002,
"step": 358
},
{
"epoch": 0.2666171555885629,
"grad_norm": 0.005294305272400379,
"learning_rate": 8.661724270617962e-05,
"loss": 0.0003,
"step": 359
},
{
"epoch": 0.26735982176011885,
"grad_norm": 0.0027490004431456327,
"learning_rate": 8.653460368254338e-05,
"loss": 0.0002,
"step": 360
},
{
"epoch": 0.26810248793167474,
"grad_norm": 0.0015370560577139258,
"learning_rate": 8.645174997774864e-05,
"loss": 0.0002,
"step": 361
},
{
"epoch": 0.2688451541032306,
"grad_norm": 0.001747027155943215,
"learning_rate": 8.636868207865244e-05,
"loss": 0.0002,
"step": 362
},
{
"epoch": 0.2695878202747865,
"grad_norm": 0.07692822813987732,
"learning_rate": 8.628540047337045e-05,
"loss": 0.0025,
"step": 363
},
{
"epoch": 0.2703304864463424,
"grad_norm": 0.003272181609645486,
"learning_rate": 8.620190565127413e-05,
"loss": 0.0002,
"step": 364
},
{
"epoch": 0.27107315261789827,
"grad_norm": 0.002899078419432044,
"learning_rate": 8.611819810298778e-05,
"loss": 0.0002,
"step": 365
},
{
"epoch": 0.27181581878945416,
"grad_norm": 0.0023670706432312727,
"learning_rate": 8.603427832038574e-05,
"loss": 0.0002,
"step": 366
},
{
"epoch": 0.27255848496101004,
"grad_norm": 0.006397952791303396,
"learning_rate": 8.595014679658941e-05,
"loss": 0.0004,
"step": 367
},
{
"epoch": 0.2733011511325659,
"grad_norm": 0.0371289923787117,
"learning_rate": 8.586580402596446e-05,
"loss": 0.0009,
"step": 368
},
{
"epoch": 0.2740438173041218,
"grad_norm": 0.0018306565470993519,
"learning_rate": 8.578125050411787e-05,
"loss": 0.0002,
"step": 369
},
{
"epoch": 0.2747864834756777,
"grad_norm": 0.003529072506353259,
"learning_rate": 8.569648672789497e-05,
"loss": 0.0002,
"step": 370
},
{
"epoch": 0.2755291496472336,
"grad_norm": 0.008671244606375694,
"learning_rate": 8.561151319537655e-05,
"loss": 0.0003,
"step": 371
},
{
"epoch": 0.27627181581878946,
"grad_norm": 0.0039878771640360355,
"learning_rate": 8.552633040587606e-05,
"loss": 0.0002,
"step": 372
},
{
"epoch": 0.27701448199034534,
"grad_norm": 0.025948379188776016,
"learning_rate": 8.544093885993643e-05,
"loss": 0.0004,
"step": 373
},
{
"epoch": 0.2777571481619012,
"grad_norm": 0.00219323905184865,
"learning_rate": 8.535533905932738e-05,
"loss": 0.0002,
"step": 374
},
{
"epoch": 0.2784998143334571,
"grad_norm": 0.015180499292910099,
"learning_rate": 8.526953150704229e-05,
"loss": 0.0005,
"step": 375
},
{
"epoch": 0.279242480505013,
"grad_norm": 0.012962475419044495,
"learning_rate": 8.518351670729529e-05,
"loss": 0.0004,
"step": 376
},
{
"epoch": 0.2799851466765689,
"grad_norm": 0.007179844658821821,
"learning_rate": 8.509729516551841e-05,
"loss": 0.0002,
"step": 377
},
{
"epoch": 0.28072781284812476,
"grad_norm": 0.06565556675195694,
"learning_rate": 8.501086738835843e-05,
"loss": 0.0012,
"step": 378
},
{
"epoch": 0.28147047901968064,
"grad_norm": 0.009149481542408466,
"learning_rate": 8.492423388367403e-05,
"loss": 0.0004,
"step": 379
},
{
"epoch": 0.2822131451912365,
"grad_norm": 0.007264537271112204,
"learning_rate": 8.483739516053276e-05,
"loss": 0.0003,
"step": 380
},
{
"epoch": 0.2829558113627924,
"grad_norm": 0.004194353707134724,
"learning_rate": 8.475035172920804e-05,
"loss": 0.0002,
"step": 381
},
{
"epoch": 0.2836984775343483,
"grad_norm": 0.0032033442985266447,
"learning_rate": 8.466310410117622e-05,
"loss": 0.0003,
"step": 382
},
{
"epoch": 0.2844411437059042,
"grad_norm": 0.004893294535577297,
"learning_rate": 8.457565278911348e-05,
"loss": 0.0005,
"step": 383
},
{
"epoch": 0.28518380987746006,
"grad_norm": 0.13098792731761932,
"learning_rate": 8.448799830689289e-05,
"loss": 0.0042,
"step": 384
},
{
"epoch": 0.28592647604901594,
"grad_norm": 0.0038283192552626133,
"learning_rate": 8.440014116958139e-05,
"loss": 0.0002,
"step": 385
},
{
"epoch": 0.2866691422205718,
"grad_norm": 0.0029847463592886925,
"learning_rate": 8.43120818934367e-05,
"loss": 0.0002,
"step": 386
},
{
"epoch": 0.28741180839212777,
"grad_norm": 0.0012196919415146112,
"learning_rate": 8.422382099590434e-05,
"loss": 0.0001,
"step": 387
},
{
"epoch": 0.28815447456368365,
"grad_norm": 0.0022556744515895844,
"learning_rate": 8.413535899561463e-05,
"loss": 0.0002,
"step": 388
},
{
"epoch": 0.28889714073523953,
"grad_norm": 0.0016208338784053922,
"learning_rate": 8.404669641237952e-05,
"loss": 0.0002,
"step": 389
},
{
"epoch": 0.2896398069067954,
"grad_norm": 0.0015344212297350168,
"learning_rate": 8.395783376718966e-05,
"loss": 0.0002,
"step": 390
},
{
"epoch": 0.2903824730783513,
"grad_norm": 0.0014779121847823262,
"learning_rate": 8.386877158221125e-05,
"loss": 0.0002,
"step": 391
},
{
"epoch": 0.2911251392499072,
"grad_norm": 0.0007657014648430049,
"learning_rate": 8.377951038078302e-05,
"loss": 0.0001,
"step": 392
},
{
"epoch": 0.29186780542146307,
"grad_norm": 0.6582887768745422,
"learning_rate": 8.369005068741314e-05,
"loss": 0.0099,
"step": 393
},
{
"epoch": 0.29261047159301895,
"grad_norm": 0.0017920684767886996,
"learning_rate": 8.360039302777612e-05,
"loss": 0.0002,
"step": 394
},
{
"epoch": 0.29335313776457483,
"grad_norm": 0.005679702386260033,
"learning_rate": 8.35105379287098e-05,
"loss": 0.0002,
"step": 395
},
{
"epoch": 0.2940958039361307,
"grad_norm": 0.0127026392146945,
"learning_rate": 8.342048591821212e-05,
"loss": 0.0003,
"step": 396
},
{
"epoch": 0.2948384701076866,
"grad_norm": 0.0027185885701328516,
"learning_rate": 8.333023752543816e-05,
"loss": 0.0002,
"step": 397
},
{
"epoch": 0.2955811362792425,
"grad_norm": 0.02255573309957981,
"learning_rate": 8.323979328069689e-05,
"loss": 0.0004,
"step": 398
},
{
"epoch": 0.29632380245079837,
"grad_norm": 0.005555687937885523,
"learning_rate": 8.314915371544822e-05,
"loss": 0.0002,
"step": 399
},
{
"epoch": 0.29706646862235425,
"grad_norm": 0.001257200026884675,
"learning_rate": 8.305831936229966e-05,
"loss": 0.0001,
"step": 400
},
{
"epoch": 0.29780913479391014,
"grad_norm": 0.07175086438655853,
"learning_rate": 8.296729075500344e-05,
"loss": 0.0013,
"step": 401
},
{
"epoch": 0.298551800965466,
"grad_norm": 0.0019333354430273175,
"learning_rate": 8.28760684284532e-05,
"loss": 0.0002,
"step": 402
},
{
"epoch": 0.2992944671370219,
"grad_norm": 0.17573364078998566,
"learning_rate": 8.278465291868083e-05,
"loss": 0.0023,
"step": 403
},
{
"epoch": 0.3000371333085778,
"grad_norm": 0.05853782966732979,
"learning_rate": 8.269304476285349e-05,
"loss": 0.0015,
"step": 404
},
{
"epoch": 0.30077979948013367,
"grad_norm": 0.09041046351194382,
"learning_rate": 8.260124449927028e-05,
"loss": 0.0022,
"step": 405
},
{
"epoch": 0.30152246565168955,
"grad_norm": 0.1666203737258911,
"learning_rate": 8.250925266735918e-05,
"loss": 0.002,
"step": 406
},
{
"epoch": 0.30226513182324544,
"grad_norm": 0.04917696863412857,
"learning_rate": 8.241706980767381e-05,
"loss": 0.0007,
"step": 407
},
{
"epoch": 0.3030077979948013,
"grad_norm": 0.0021273689344525337,
"learning_rate": 8.232469646189032e-05,
"loss": 0.0001,
"step": 408
},
{
"epoch": 0.3037504641663572,
"grad_norm": 0.0016986045520752668,
"learning_rate": 8.223213317280419e-05,
"loss": 0.0001,
"step": 409
},
{
"epoch": 0.3044931303379131,
"grad_norm": 0.03786956146359444,
"learning_rate": 8.213938048432697e-05,
"loss": 0.0006,
"step": 410
},
{
"epoch": 0.30523579650946897,
"grad_norm": 0.0048659974709153175,
"learning_rate": 8.204643894148318e-05,
"loss": 0.0002,
"step": 411
},
{
"epoch": 0.30597846268102485,
"grad_norm": 0.007152218371629715,
"learning_rate": 8.195330909040708e-05,
"loss": 0.0002,
"step": 412
},
{
"epoch": 0.30672112885258074,
"grad_norm": 0.0123271644115448,
"learning_rate": 8.185999147833943e-05,
"loss": 0.0004,
"step": 413
},
{
"epoch": 0.3074637950241367,
"grad_norm": 0.001973194070160389,
"learning_rate": 8.176648665362425e-05,
"loss": 0.0001,
"step": 414
},
{
"epoch": 0.30820646119569256,
"grad_norm": 0.007642180658876896,
"learning_rate": 8.167279516570575e-05,
"loss": 0.0005,
"step": 415
},
{
"epoch": 0.30894912736724844,
"grad_norm": 0.02631974406540394,
"learning_rate": 8.157891756512488e-05,
"loss": 0.0004,
"step": 416
},
{
"epoch": 0.3096917935388043,
"grad_norm": 0.0035103110130876303,
"learning_rate": 8.148485440351629e-05,
"loss": 0.0002,
"step": 417
},
{
"epoch": 0.3104344597103602,
"grad_norm": 0.004595728125423193,
"learning_rate": 8.139060623360493e-05,
"loss": 0.0002,
"step": 418
},
{
"epoch": 0.3111771258819161,
"grad_norm": 0.002736350754275918,
"learning_rate": 8.129617360920296e-05,
"loss": 0.0002,
"step": 419
},
{
"epoch": 0.311919792053472,
"grad_norm": 0.0011626658961176872,
"learning_rate": 8.120155708520636e-05,
"loss": 0.0001,
"step": 420
},
{
"epoch": 0.31266245822502786,
"grad_norm": 0.004247245844453573,
"learning_rate": 8.110675721759171e-05,
"loss": 0.0002,
"step": 421
},
{
"epoch": 0.31340512439658375,
"grad_norm": 0.001799743971787393,
"learning_rate": 8.1011774563413e-05,
"loss": 0.0001,
"step": 422
},
{
"epoch": 0.31414779056813963,
"grad_norm": 0.011404055170714855,
"learning_rate": 8.091660968079826e-05,
"loss": 0.0004,
"step": 423
},
{
"epoch": 0.3148904567396955,
"grad_norm": 0.00542003707960248,
"learning_rate": 8.082126312894626e-05,
"loss": 0.0003,
"step": 424
},
{
"epoch": 0.3156331229112514,
"grad_norm": 0.1897122859954834,
"learning_rate": 8.072573546812338e-05,
"loss": 0.0017,
"step": 425
},
{
"epoch": 0.3163757890828073,
"grad_norm": 0.004132653120905161,
"learning_rate": 8.063002725966015e-05,
"loss": 0.0002,
"step": 426
},
{
"epoch": 0.31711845525436316,
"grad_norm": 0.0019162169191986322,
"learning_rate": 8.0534139065948e-05,
"loss": 0.0002,
"step": 427
},
{
"epoch": 0.31786112142591905,
"grad_norm": 0.004962231498211622,
"learning_rate": 8.043807145043604e-05,
"loss": 0.0002,
"step": 428
},
{
"epoch": 0.31860378759747493,
"grad_norm": 0.0016735494136810303,
"learning_rate": 8.034182497762762e-05,
"loss": 0.0001,
"step": 429
},
{
"epoch": 0.3193464537690308,
"grad_norm": 0.0013713808730244637,
"learning_rate": 8.024540021307708e-05,
"loss": 0.0001,
"step": 430
},
{
"epoch": 0.3200891199405867,
"grad_norm": 0.00254115485586226,
"learning_rate": 8.014879772338649e-05,
"loss": 0.0002,
"step": 431
},
{
"epoch": 0.3208317861121426,
"grad_norm": 0.0037052470725029707,
"learning_rate": 8.005201807620215e-05,
"loss": 0.0002,
"step": 432
},
{
"epoch": 0.32157445228369846,
"grad_norm": 0.0029492946341633797,
"learning_rate": 7.995506184021142e-05,
"loss": 0.0002,
"step": 433
},
{
"epoch": 0.32231711845525435,
"grad_norm": 0.001130992779508233,
"learning_rate": 7.985792958513931e-05,
"loss": 0.0001,
"step": 434
},
{
"epoch": 0.32305978462681023,
"grad_norm": 0.001874015899375081,
"learning_rate": 7.976062188174512e-05,
"loss": 0.0001,
"step": 435
},
{
"epoch": 0.3238024507983661,
"grad_norm": 0.007868077605962753,
"learning_rate": 7.966313930181912e-05,
"loss": 0.0002,
"step": 436
},
{
"epoch": 0.324545116969922,
"grad_norm": 0.0044991993345320225,
"learning_rate": 7.956548241817912e-05,
"loss": 0.0002,
"step": 437
},
{
"epoch": 0.3252877831414779,
"grad_norm": 0.0006172333378344774,
"learning_rate": 7.946765180466724e-05,
"loss": 0.0001,
"step": 438
},
{
"epoch": 0.32603044931303377,
"grad_norm": 0.0030133500695228577,
"learning_rate": 7.936964803614641e-05,
"loss": 0.0001,
"step": 439
},
{
"epoch": 0.3267731154845897,
"grad_norm": 0.006758226081728935,
"learning_rate": 7.927147168849704e-05,
"loss": 0.0002,
"step": 440
},
{
"epoch": 0.3275157816561456,
"grad_norm": 0.004247500095516443,
"learning_rate": 7.91731233386136e-05,
"loss": 0.0003,
"step": 441
},
{
"epoch": 0.32825844782770147,
"grad_norm": 0.005194083787500858,
"learning_rate": 7.907460356440133e-05,
"loss": 0.0002,
"step": 442
},
{
"epoch": 0.32900111399925736,
"grad_norm": 0.002930448157712817,
"learning_rate": 7.897591294477275e-05,
"loss": 0.0001,
"step": 443
},
{
"epoch": 0.32974378017081324,
"grad_norm": 0.0016131598968058825,
"learning_rate": 7.887705205964426e-05,
"loss": 0.0001,
"step": 444
},
{
"epoch": 0.3304864463423691,
"grad_norm": 0.002172644715756178,
"learning_rate": 7.877802148993277e-05,
"loss": 0.0001,
"step": 445
},
{
"epoch": 0.331229112513925,
"grad_norm": 0.027609726414084435,
"learning_rate": 7.86788218175523e-05,
"loss": 0.0006,
"step": 446
},
{
"epoch": 0.3319717786854809,
"grad_norm": 0.015412206761538982,
"learning_rate": 7.857945362541053e-05,
"loss": 0.0005,
"step": 447
},
{
"epoch": 0.3327144448570368,
"grad_norm": 0.002325060311704874,
"learning_rate": 7.847991749740533e-05,
"loss": 0.0001,
"step": 448
},
{
"epoch": 0.33345711102859266,
"grad_norm": 0.0031837387941777706,
"learning_rate": 7.838021401842144e-05,
"loss": 0.0001,
"step": 449
},
{
"epoch": 0.33419977720014854,
"grad_norm": 0.003575611859560013,
"learning_rate": 7.828034377432693e-05,
"loss": 0.0001,
"step": 450
},
{
"epoch": 0.3349424433717044,
"grad_norm": 0.0006315786740742624,
"learning_rate": 7.818030735196984e-05,
"loss": 0.0001,
"step": 451
},
{
"epoch": 0.3356851095432603,
"grad_norm": 0.005519942846149206,
"learning_rate": 7.808010533917465e-05,
"loss": 0.0002,
"step": 452
},
{
"epoch": 0.3364277757148162,
"grad_norm": 0.0023312256671488285,
"learning_rate": 7.797973832473889e-05,
"loss": 0.0002,
"step": 453
},
{
"epoch": 0.3371704418863721,
"grad_norm": 0.0030489324126392603,
"learning_rate": 7.787920689842964e-05,
"loss": 0.0001,
"step": 454
},
{
"epoch": 0.33791310805792796,
"grad_norm": 0.0010298596462234855,
"learning_rate": 7.777851165098012e-05,
"loss": 0.0001,
"step": 455
},
{
"epoch": 0.33865577422948384,
"grad_norm": 0.0006818081019446254,
"learning_rate": 7.767765317408613e-05,
"loss": 0.0001,
"step": 456
},
{
"epoch": 0.3393984404010397,
"grad_norm": 0.0019495798042044044,
"learning_rate": 7.757663206040264e-05,
"loss": 0.0001,
"step": 457
},
{
"epoch": 0.3401411065725956,
"grad_norm": 0.012790280394256115,
"learning_rate": 7.74754489035403e-05,
"loss": 0.0002,
"step": 458
},
{
"epoch": 0.3408837727441515,
"grad_norm": 0.0026799726765602827,
"learning_rate": 7.737410429806196e-05,
"loss": 0.0001,
"step": 459
},
{
"epoch": 0.3416264389157074,
"grad_norm": 0.014524313621222973,
"learning_rate": 7.727259883947913e-05,
"loss": 0.0004,
"step": 460
},
{
"epoch": 0.34236910508726326,
"grad_norm": 0.0005140351131558418,
"learning_rate": 7.71709331242485e-05,
"loss": 0.0001,
"step": 461
},
{
"epoch": 0.34311177125881914,
"grad_norm": 0.003770928829908371,
"learning_rate": 7.706910774976849e-05,
"loss": 0.0001,
"step": 462
},
{
"epoch": 0.343854437430375,
"grad_norm": 0.0012231201399117708,
"learning_rate": 7.696712331437565e-05,
"loss": 0.0001,
"step": 463
},
{
"epoch": 0.3445971036019309,
"grad_norm": 0.005815563257783651,
"learning_rate": 7.68649804173412e-05,
"loss": 0.0001,
"step": 464
},
{
"epoch": 0.3453397697734868,
"grad_norm": 0.0009362814598716795,
"learning_rate": 7.676267965886752e-05,
"loss": 0.0001,
"step": 465
},
{
"epoch": 0.3460824359450427,
"grad_norm": 0.005367044825106859,
"learning_rate": 7.666022164008457e-05,
"loss": 0.0001,
"step": 466
},
{
"epoch": 0.3468251021165986,
"grad_norm": 0.0023662326857447624,
"learning_rate": 7.655760696304641e-05,
"loss": 0.0001,
"step": 467
},
{
"epoch": 0.3475677682881545,
"grad_norm": 0.0014945007860660553,
"learning_rate": 7.645483623072763e-05,
"loss": 0.0001,
"step": 468
},
{
"epoch": 0.3483104344597104,
"grad_norm": 0.0008832917083054781,
"learning_rate": 7.635191004701981e-05,
"loss": 0.0001,
"step": 469
},
{
"epoch": 0.34905310063126627,
"grad_norm": 0.001919624744914472,
"learning_rate": 7.6248829016728e-05,
"loss": 0.0001,
"step": 470
},
{
"epoch": 0.34979576680282215,
"grad_norm": 0.003197997808456421,
"learning_rate": 7.614559374556715e-05,
"loss": 0.0003,
"step": 471
},
{
"epoch": 0.35053843297437803,
"grad_norm": 0.004050462041050196,
"learning_rate": 7.60422048401585e-05,
"loss": 0.0001,
"step": 472
},
{
"epoch": 0.3512810991459339,
"grad_norm": 0.12640826404094696,
"learning_rate": 7.593866290802608e-05,
"loss": 0.0009,
"step": 473
},
{
"epoch": 0.3520237653174898,
"grad_norm": 0.003920804709196091,
"learning_rate": 7.583496855759316e-05,
"loss": 0.0002,
"step": 474
},
{
"epoch": 0.3527664314890457,
"grad_norm": 0.0023814719170331955,
"learning_rate": 7.573112239817857e-05,
"loss": 0.0001,
"step": 475
},
{
"epoch": 0.35350909766060157,
"grad_norm": 0.011592867784202099,
"learning_rate": 7.562712503999327e-05,
"loss": 0.0002,
"step": 476
},
{
"epoch": 0.35425176383215745,
"grad_norm": 0.0014887212309986353,
"learning_rate": 7.552297709413658e-05,
"loss": 0.0002,
"step": 477
},
{
"epoch": 0.35499443000371333,
"grad_norm": 0.0006836429820396006,
"learning_rate": 7.541867917259277e-05,
"loss": 0.0001,
"step": 478
},
{
"epoch": 0.3557370961752692,
"grad_norm": 0.025796182453632355,
"learning_rate": 7.531423188822737e-05,
"loss": 0.0005,
"step": 479
},
{
"epoch": 0.3564797623468251,
"grad_norm": 0.0009914631955325603,
"learning_rate": 7.520963585478353e-05,
"loss": 0.0001,
"step": 480
},
{
"epoch": 0.357222428518381,
"grad_norm": 0.0003263329854235053,
"learning_rate": 7.510489168687851e-05,
"loss": 0.0001,
"step": 481
},
{
"epoch": 0.35796509468993687,
"grad_norm": 0.0006598143372684717,
"learning_rate": 7.500000000000001e-05,
"loss": 0.0001,
"step": 482
},
{
"epoch": 0.35870776086149275,
"grad_norm": 0.0005369288846850395,
"learning_rate": 7.489496141050258e-05,
"loss": 0.0001,
"step": 483
},
{
"epoch": 0.35945042703304864,
"grad_norm": 0.00033336677006445825,
"learning_rate": 7.478977653560397e-05,
"loss": 0.0001,
"step": 484
},
{
"epoch": 0.3601930932046045,
"grad_norm": 0.0009709247970022261,
"learning_rate": 7.468444599338151e-05,
"loss": 0.0001,
"step": 485
},
{
"epoch": 0.3609357593761604,
"grad_norm": 0.0009371594642288983,
"learning_rate": 7.457897040276853e-05,
"loss": 0.0001,
"step": 486
},
{
"epoch": 0.3616784255477163,
"grad_norm": 0.0009692166349850595,
"learning_rate": 7.447335038355063e-05,
"loss": 0.0001,
"step": 487
},
{
"epoch": 0.36242109171927217,
"grad_norm": 0.0006186399841681123,
"learning_rate": 7.436758655636212e-05,
"loss": 0.0001,
"step": 488
},
{
"epoch": 0.36316375789082805,
"grad_norm": 0.0017053054179996252,
"learning_rate": 7.426167954268231e-05,
"loss": 0.0001,
"step": 489
},
{
"epoch": 0.36390642406238394,
"grad_norm": 0.0015286095440387726,
"learning_rate": 7.415562996483192e-05,
"loss": 0.0001,
"step": 490
},
{
"epoch": 0.3646490902339398,
"grad_norm": 0.004435093142092228,
"learning_rate": 7.404943844596939e-05,
"loss": 0.0003,
"step": 491
},
{
"epoch": 0.3653917564054957,
"grad_norm": 0.00381348910741508,
"learning_rate": 7.394310561008717e-05,
"loss": 0.0002,
"step": 492
},
{
"epoch": 0.3661344225770516,
"grad_norm": 0.0006017435807734728,
"learning_rate": 7.38366320820082e-05,
"loss": 0.0001,
"step": 493
},
{
"epoch": 0.3668770887486075,
"grad_norm": 0.00891191978007555,
"learning_rate": 7.373001848738202e-05,
"loss": 0.0003,
"step": 494
},
{
"epoch": 0.3676197549201634,
"grad_norm": 0.0004915704485028982,
"learning_rate": 7.362326545268133e-05,
"loss": 0.0001,
"step": 495
},
{
"epoch": 0.3683624210917193,
"grad_norm": 0.0015488892095163465,
"learning_rate": 7.351637360519813e-05,
"loss": 0.0001,
"step": 496
},
{
"epoch": 0.3691050872632752,
"grad_norm": 0.002463052747771144,
"learning_rate": 7.34093435730401e-05,
"loss": 0.0002,
"step": 497
},
{
"epoch": 0.36984775343483106,
"grad_norm": 0.0014587301993742585,
"learning_rate": 7.330217598512695e-05,
"loss": 0.0001,
"step": 498
},
{
"epoch": 0.37059041960638694,
"grad_norm": 0.001909661223180592,
"learning_rate": 7.319487147118663e-05,
"loss": 0.0001,
"step": 499
},
{
"epoch": 0.37133308577794283,
"grad_norm": 0.0004529351135715842,
"learning_rate": 7.308743066175172e-05,
"loss": 0.0001,
"step": 500
},
{
"epoch": 0.3720757519494987,
"grad_norm": 0.03147125244140625,
"learning_rate": 7.297985418815563e-05,
"loss": 0.0006,
"step": 501
},
{
"epoch": 0.3728184181210546,
"grad_norm": 0.0007625695434398949,
"learning_rate": 7.287214268252904e-05,
"loss": 0.0001,
"step": 502
},
{
"epoch": 0.3735610842926105,
"grad_norm": 0.003061138093471527,
"learning_rate": 7.276429677779602e-05,
"loss": 0.0001,
"step": 503
},
{
"epoch": 0.37430375046416636,
"grad_norm": 0.000722390366718173,
"learning_rate": 7.265631710767041e-05,
"loss": 0.0001,
"step": 504
},
{
"epoch": 0.37504641663572225,
"grad_norm": 0.0014981662388890982,
"learning_rate": 7.254820430665206e-05,
"loss": 0.0001,
"step": 505
},
{
"epoch": 0.37578908280727813,
"grad_norm": 0.0009839913109317422,
"learning_rate": 7.243995901002312e-05,
"loss": 0.0001,
"step": 506
},
{
"epoch": 0.376531748978834,
"grad_norm": 0.0012856576358899474,
"learning_rate": 7.233158185384426e-05,
"loss": 0.0001,
"step": 507
},
{
"epoch": 0.3772744151503899,
"grad_norm": 0.0018630792619660497,
"learning_rate": 7.222307347495105e-05,
"loss": 0.0002,
"step": 508
},
{
"epoch": 0.3780170813219458,
"grad_norm": 0.0028617579955607653,
"learning_rate": 7.211443451095007e-05,
"loss": 0.0002,
"step": 509
},
{
"epoch": 0.37875974749350166,
"grad_norm": 0.0007013090653344989,
"learning_rate": 7.200566560021524e-05,
"loss": 0.0001,
"step": 510
},
{
"epoch": 0.37950241366505755,
"grad_norm": 0.00415767403319478,
"learning_rate": 7.18967673818841e-05,
"loss": 0.0002,
"step": 511
},
{
"epoch": 0.38024507983661343,
"grad_norm": 0.006695980206131935,
"learning_rate": 7.178774049585397e-05,
"loss": 0.0002,
"step": 512
},
{
"epoch": 0.3809877460081693,
"grad_norm": 0.00166626728605479,
"learning_rate": 7.167858558277827e-05,
"loss": 0.0001,
"step": 513
},
{
"epoch": 0.3817304121797252,
"grad_norm": 0.0006517748697660863,
"learning_rate": 7.156930328406268e-05,
"loss": 0.0001,
"step": 514
},
{
"epoch": 0.3824730783512811,
"grad_norm": 0.0009984513744711876,
"learning_rate": 7.145989424186146e-05,
"loss": 0.0001,
"step": 515
},
{
"epoch": 0.38321574452283697,
"grad_norm": 0.0003130779368802905,
"learning_rate": 7.135035909907358e-05,
"loss": 0.0001,
"step": 516
},
{
"epoch": 0.38395841069439285,
"grad_norm": 0.006763228215277195,
"learning_rate": 7.124069849933903e-05,
"loss": 0.0003,
"step": 517
},
{
"epoch": 0.38470107686594873,
"grad_norm": 0.0008593749371357262,
"learning_rate": 7.113091308703498e-05,
"loss": 0.0001,
"step": 518
},
{
"epoch": 0.3854437430375046,
"grad_norm": 0.0006246920092962682,
"learning_rate": 7.102100350727201e-05,
"loss": 0.0001,
"step": 519
},
{
"epoch": 0.38618640920906055,
"grad_norm": 0.0012244340032339096,
"learning_rate": 7.091097040589032e-05,
"loss": 0.0001,
"step": 520
},
{
"epoch": 0.38692907538061644,
"grad_norm": 0.0009561783517710865,
"learning_rate": 7.080081442945596e-05,
"loss": 0.0001,
"step": 521
},
{
"epoch": 0.3876717415521723,
"grad_norm": 0.0005518002435564995,
"learning_rate": 7.069053622525696e-05,
"loss": 0.0001,
"step": 522
},
{
"epoch": 0.3884144077237282,
"grad_norm": 0.006016881670802832,
"learning_rate": 7.058013644129962e-05,
"loss": 0.0002,
"step": 523
},
{
"epoch": 0.3891570738952841,
"grad_norm": 0.002434986876323819,
"learning_rate": 7.046961572630462e-05,
"loss": 0.0002,
"step": 524
},
{
"epoch": 0.38989974006684,
"grad_norm": 0.001255060895346105,
"learning_rate": 7.035897472970329e-05,
"loss": 0.0001,
"step": 525
},
{
"epoch": 0.39064240623839586,
"grad_norm": 0.007994726300239563,
"learning_rate": 7.024821410163368e-05,
"loss": 0.0002,
"step": 526
},
{
"epoch": 0.39138507240995174,
"grad_norm": 0.002365407533943653,
"learning_rate": 7.013733449293687e-05,
"loss": 0.0002,
"step": 527
},
{
"epoch": 0.3921277385815076,
"grad_norm": 0.000903936626855284,
"learning_rate": 7.002633655515303e-05,
"loss": 0.0001,
"step": 528
},
{
"epoch": 0.3928704047530635,
"grad_norm": 0.001310791471041739,
"learning_rate": 6.99152209405177e-05,
"loss": 0.0001,
"step": 529
},
{
"epoch": 0.3936130709246194,
"grad_norm": 0.0015525859780609608,
"learning_rate": 6.980398830195785e-05,
"loss": 0.0001,
"step": 530
},
{
"epoch": 0.3943557370961753,
"grad_norm": 0.0004448418621905148,
"learning_rate": 6.969263929308812e-05,
"loss": 0.0001,
"step": 531
},
{
"epoch": 0.39509840326773116,
"grad_norm": 0.0003616507747210562,
"learning_rate": 6.958117456820696e-05,
"loss": 0.0001,
"step": 532
},
{
"epoch": 0.39584106943928704,
"grad_norm": 0.0032966274302452803,
"learning_rate": 6.946959478229276e-05,
"loss": 0.0001,
"step": 533
},
{
"epoch": 0.3965837356108429,
"grad_norm": 0.0006540945032611489,
"learning_rate": 6.935790059100003e-05,
"loss": 0.0001,
"step": 534
},
{
"epoch": 0.3973264017823988,
"grad_norm": 0.0009542697225697339,
"learning_rate": 6.924609265065556e-05,
"loss": 0.0001,
"step": 535
},
{
"epoch": 0.3980690679539547,
"grad_norm": 0.004549324978142977,
"learning_rate": 6.91341716182545e-05,
"loss": 0.0003,
"step": 536
},
{
"epoch": 0.3988117341255106,
"grad_norm": 0.0008191264350898564,
"learning_rate": 6.902213815145655e-05,
"loss": 0.0001,
"step": 537
},
{
"epoch": 0.39955440029706646,
"grad_norm": 0.0007376139401458204,
"learning_rate": 6.890999290858214e-05,
"loss": 0.0001,
"step": 538
},
{
"epoch": 0.40029706646862234,
"grad_norm": 0.0011172041995450854,
"learning_rate": 6.87977365486084e-05,
"loss": 0.0001,
"step": 539
},
{
"epoch": 0.4010397326401782,
"grad_norm": 0.0010884919902309775,
"learning_rate": 6.868536973116552e-05,
"loss": 0.0001,
"step": 540
},
{
"epoch": 0.4017823988117341,
"grad_norm": 0.0008193852263502777,
"learning_rate": 6.857289311653268e-05,
"loss": 0.0001,
"step": 541
},
{
"epoch": 0.40252506498329,
"grad_norm": 0.0013226550072431564,
"learning_rate": 6.846030736563422e-05,
"loss": 0.0001,
"step": 542
},
{
"epoch": 0.4032677311548459,
"grad_norm": 0.0019562705419957638,
"learning_rate": 6.834761314003584e-05,
"loss": 0.0001,
"step": 543
},
{
"epoch": 0.40401039732640176,
"grad_norm": 0.0033758783247321844,
"learning_rate": 6.82348111019406e-05,
"loss": 0.0001,
"step": 544
},
{
"epoch": 0.40475306349795764,
"grad_norm": 0.0009793438948690891,
"learning_rate": 6.812190191418508e-05,
"loss": 0.0001,
"step": 545
},
{
"epoch": 0.4054957296695135,
"grad_norm": 0.001416007406078279,
"learning_rate": 6.800888624023553e-05,
"loss": 0.0001,
"step": 546
},
{
"epoch": 0.40623839584106947,
"grad_norm": 0.0026139297988265753,
"learning_rate": 6.789576474418386e-05,
"loss": 0.0002,
"step": 547
},
{
"epoch": 0.40698106201262535,
"grad_norm": 1.4912738800048828,
"learning_rate": 6.778253809074385e-05,
"loss": 0.1274,
"step": 548
},
{
"epoch": 0.40772372818418123,
"grad_norm": 0.0013667610473930836,
"learning_rate": 6.766920694524714e-05,
"loss": 0.0001,
"step": 549
},
{
"epoch": 0.4084663943557371,
"grad_norm": 0.633527934551239,
"learning_rate": 6.755577197363944e-05,
"loss": 0.0168,
"step": 550
},
{
"epoch": 0.409209060527293,
"grad_norm": 0.0013449821854010224,
"learning_rate": 6.744223384247655e-05,
"loss": 0.0001,
"step": 551
},
{
"epoch": 0.4099517266988489,
"grad_norm": 0.0064977309666574,
"learning_rate": 6.732859321892037e-05,
"loss": 0.0003,
"step": 552
},
{
"epoch": 0.41069439287040477,
"grad_norm": 0.0006492669344879687,
"learning_rate": 6.721485077073518e-05,
"loss": 0.0001,
"step": 553
},
{
"epoch": 0.41143705904196065,
"grad_norm": 0.4942440986633301,
"learning_rate": 6.710100716628344e-05,
"loss": 0.0183,
"step": 554
},
{
"epoch": 0.41217972521351653,
"grad_norm": 0.0004884397494606674,
"learning_rate": 6.698706307452216e-05,
"loss": 0.0001,
"step": 555
},
{
"epoch": 0.4129223913850724,
"grad_norm": 0.0014100876869633794,
"learning_rate": 6.687301916499871e-05,
"loss": 0.0002,
"step": 556
},
{
"epoch": 0.4136650575566283,
"grad_norm": 0.0025068807881325483,
"learning_rate": 6.675887610784708e-05,
"loss": 0.0001,
"step": 557
},
{
"epoch": 0.4144077237281842,
"grad_norm": 0.0007504708482883871,
"learning_rate": 6.664463457378383e-05,
"loss": 0.0001,
"step": 558
},
{
"epoch": 0.41515038989974007,
"grad_norm": 0.0008488527382723987,
"learning_rate": 6.653029523410417e-05,
"loss": 0.0001,
"step": 559
},
{
"epoch": 0.41589305607129595,
"grad_norm": 3.241758108139038,
"learning_rate": 6.641585876067807e-05,
"loss": 0.0741,
"step": 560
},
{
"epoch": 0.41663572224285184,
"grad_norm": 0.0009746011346578598,
"learning_rate": 6.630132582594617e-05,
"loss": 0.0001,
"step": 561
},
{
"epoch": 0.4173783884144077,
"grad_norm": 0.004342979751527309,
"learning_rate": 6.618669710291606e-05,
"loss": 0.0002,
"step": 562
},
{
"epoch": 0.4181210545859636,
"grad_norm": 0.03014206513762474,
"learning_rate": 6.607197326515808e-05,
"loss": 0.001,
"step": 563
},
{
"epoch": 0.4188637207575195,
"grad_norm": 0.03773445263504982,
"learning_rate": 6.595715498680156e-05,
"loss": 0.0005,
"step": 564
},
{
"epoch": 0.41960638692907537,
"grad_norm": 0.23458726704120636,
"learning_rate": 6.584224294253069e-05,
"loss": 0.0019,
"step": 565
},
{
"epoch": 0.42034905310063125,
"grad_norm": 0.01963810622692108,
"learning_rate": 6.572723780758069e-05,
"loss": 0.0003,
"step": 566
},
{
"epoch": 0.42109171927218714,
"grad_norm": 0.002442006254568696,
"learning_rate": 6.56121402577338e-05,
"loss": 0.0002,
"step": 567
},
{
"epoch": 0.421834385443743,
"grad_norm": 0.026274830102920532,
"learning_rate": 6.549695096931527e-05,
"loss": 0.0002,
"step": 568
},
{
"epoch": 0.4225770516152989,
"grad_norm": 0.0038544712588191032,
"learning_rate": 6.538167061918941e-05,
"loss": 0.0002,
"step": 569
},
{
"epoch": 0.4233197177868548,
"grad_norm": 0.019080875441432,
"learning_rate": 6.526629988475567e-05,
"loss": 0.0007,
"step": 570
},
{
"epoch": 0.42406238395841067,
"grad_norm": 0.01698448695242405,
"learning_rate": 6.515083944394453e-05,
"loss": 0.0003,
"step": 571
},
{
"epoch": 0.42480505012996655,
"grad_norm": 0.006882249377667904,
"learning_rate": 6.503528997521366e-05,
"loss": 0.0004,
"step": 572
},
{
"epoch": 0.42554771630152244,
"grad_norm": 0.003293162677437067,
"learning_rate": 6.491965215754384e-05,
"loss": 0.0002,
"step": 573
},
{
"epoch": 0.4262903824730784,
"grad_norm": 0.004137948155403137,
"learning_rate": 6.4803926670435e-05,
"loss": 0.0002,
"step": 574
},
{
"epoch": 0.42703304864463426,
"grad_norm": 0.035909973084926605,
"learning_rate": 6.468811419390222e-05,
"loss": 0.0006,
"step": 575
},
{
"epoch": 0.42777571481619014,
"grad_norm": 0.004789507016539574,
"learning_rate": 6.457221540847176e-05,
"loss": 0.0003,
"step": 576
},
{
"epoch": 0.42851838098774603,
"grad_norm": 0.0077894036658108234,
"learning_rate": 6.4456230995177e-05,
"loss": 0.0002,
"step": 577
},
{
"epoch": 0.4292610471593019,
"grad_norm": 0.018438173457980156,
"learning_rate": 6.434016163555452e-05,
"loss": 0.0006,
"step": 578
},
{
"epoch": 0.4300037133308578,
"grad_norm": 0.004602341912686825,
"learning_rate": 6.422400801164003e-05,
"loss": 0.0002,
"step": 579
},
{
"epoch": 0.4307463795024137,
"grad_norm": 0.005459954962134361,
"learning_rate": 6.41077708059644e-05,
"loss": 0.0003,
"step": 580
},
{
"epoch": 0.43148904567396956,
"grad_norm": 0.2004091739654541,
"learning_rate": 6.399145070154961e-05,
"loss": 0.0013,
"step": 581
},
{
"epoch": 0.43223171184552545,
"grad_norm": 0.001691634999588132,
"learning_rate": 6.387504838190479e-05,
"loss": 0.0002,
"step": 582
},
{
"epoch": 0.43297437801708133,
"grad_norm": 0.002549877157434821,
"learning_rate": 6.375856453102217e-05,
"loss": 0.0002,
"step": 583
},
{
"epoch": 0.4337170441886372,
"grad_norm": 0.014543402940034866,
"learning_rate": 6.364199983337306e-05,
"loss": 0.0006,
"step": 584
},
{
"epoch": 0.4344597103601931,
"grad_norm": 0.025866270065307617,
"learning_rate": 6.352535497390381e-05,
"loss": 0.0006,
"step": 585
},
{
"epoch": 0.435202376531749,
"grad_norm": 0.001332697574980557,
"learning_rate": 6.340863063803188e-05,
"loss": 0.0001,
"step": 586
},
{
"epoch": 0.43594504270330486,
"grad_norm": 0.006850456353276968,
"learning_rate": 6.329182751164165e-05,
"loss": 0.0004,
"step": 587
},
{
"epoch": 0.43668770887486075,
"grad_norm": 0.002208834746852517,
"learning_rate": 6.317494628108054e-05,
"loss": 0.0002,
"step": 588
},
{
"epoch": 0.43743037504641663,
"grad_norm": 0.0022124634124338627,
"learning_rate": 6.305798763315491e-05,
"loss": 0.0002,
"step": 589
},
{
"epoch": 0.4381730412179725,
"grad_norm": 0.0013898340985178947,
"learning_rate": 6.294095225512603e-05,
"loss": 0.0001,
"step": 590
},
{
"epoch": 0.4389157073895284,
"grad_norm": 0.0031000098679214716,
"learning_rate": 6.282384083470605e-05,
"loss": 0.0002,
"step": 591
},
{
"epoch": 0.4396583735610843,
"grad_norm": 0.004475159104913473,
"learning_rate": 6.270665406005393e-05,
"loss": 0.0002,
"step": 592
},
{
"epoch": 0.44040103973264016,
"grad_norm": 0.002459387294948101,
"learning_rate": 6.258939261977143e-05,
"loss": 0.0002,
"step": 593
},
{
"epoch": 0.44114370590419605,
"grad_norm": 0.0015267659910023212,
"learning_rate": 6.247205720289907e-05,
"loss": 0.0001,
"step": 594
},
{
"epoch": 0.44188637207575193,
"grad_norm": 0.0016903842333704233,
"learning_rate": 6.235464849891205e-05,
"loss": 0.0002,
"step": 595
},
{
"epoch": 0.4426290382473078,
"grad_norm": 0.008800814859569073,
"learning_rate": 6.22371671977162e-05,
"loss": 0.0005,
"step": 596
},
{
"epoch": 0.4433717044188637,
"grad_norm": 0.006040891632437706,
"learning_rate": 6.211961398964396e-05,
"loss": 0.0004,
"step": 597
},
{
"epoch": 0.4441143705904196,
"grad_norm": 0.014548071660101414,
"learning_rate": 6.20019895654503e-05,
"loss": 0.0004,
"step": 598
},
{
"epoch": 0.44485703676197547,
"grad_norm": 0.0014387888368219137,
"learning_rate": 6.188429461630866e-05,
"loss": 0.0001,
"step": 599
},
{
"epoch": 0.4455997029335314,
"grad_norm": 0.0014056372456252575,
"learning_rate": 6.176652983380689e-05,
"loss": 0.0001,
"step": 600
},
{
"epoch": 0.4463423691050873,
"grad_norm": 0.0028386348858475685,
"learning_rate": 6.164869590994317e-05,
"loss": 0.0003,
"step": 601
},
{
"epoch": 0.4470850352766432,
"grad_norm": 0.0020914392080157995,
"learning_rate": 6.153079353712201e-05,
"loss": 0.0002,
"step": 602
},
{
"epoch": 0.44782770144819906,
"grad_norm": 0.0010804440826177597,
"learning_rate": 6.14128234081501e-05,
"loss": 0.0002,
"step": 603
},
{
"epoch": 0.44857036761975494,
"grad_norm": 0.11864292621612549,
"learning_rate": 6.129478621623224e-05,
"loss": 0.0011,
"step": 604
},
{
"epoch": 0.4493130337913108,
"grad_norm": 0.005421667359769344,
"learning_rate": 6.117668265496737e-05,
"loss": 0.0003,
"step": 605
},
{
"epoch": 0.4500556999628667,
"grad_norm": 0.002962655620649457,
"learning_rate": 6.105851341834439e-05,
"loss": 0.0002,
"step": 606
},
{
"epoch": 0.4507983661344226,
"grad_norm": 0.0069792428985238075,
"learning_rate": 6.094027920073811e-05,
"loss": 0.0003,
"step": 607
},
{
"epoch": 0.4515410323059785,
"grad_norm": 0.004223910626024008,
"learning_rate": 6.0821980696905146e-05,
"loss": 0.0003,
"step": 608
},
{
"epoch": 0.45228369847753436,
"grad_norm": 0.004594327881932259,
"learning_rate": 6.070361860197994e-05,
"loss": 0.0003,
"step": 609
},
{
"epoch": 0.45302636464909024,
"grad_norm": 0.0028815660625696182,
"learning_rate": 6.058519361147055e-05,
"loss": 0.0002,
"step": 610
},
{
"epoch": 0.4537690308206461,
"grad_norm": 0.05003628879785538,
"learning_rate": 6.04667064212546e-05,
"loss": 0.0015,
"step": 611
},
{
"epoch": 0.454511696992202,
"grad_norm": 0.004502360709011555,
"learning_rate": 6.034815772757528e-05,
"loss": 0.0003,
"step": 612
},
{
"epoch": 0.4552543631637579,
"grad_norm": 0.000712364970240742,
"learning_rate": 6.022954822703709e-05,
"loss": 0.0001,
"step": 613
},
{
"epoch": 0.4559970293353138,
"grad_norm": 0.010336031205952168,
"learning_rate": 6.0110878616601904e-05,
"loss": 0.0004,
"step": 614
},
{
"epoch": 0.45673969550686966,
"grad_norm": 0.0018599943723529577,
"learning_rate": 5.999214959358477e-05,
"loss": 0.0002,
"step": 615
},
{
"epoch": 0.45748236167842554,
"grad_norm": 0.0009950903477147222,
"learning_rate": 5.9873361855649876e-05,
"loss": 0.0001,
"step": 616
},
{
"epoch": 0.4582250278499814,
"grad_norm": 0.014687101356685162,
"learning_rate": 5.9754516100806423e-05,
"loss": 0.0002,
"step": 617
},
{
"epoch": 0.4589676940215373,
"grad_norm": 0.003395050298422575,
"learning_rate": 5.963561302740449e-05,
"loss": 0.0001,
"step": 618
},
{
"epoch": 0.4597103601930932,
"grad_norm": 0.0098502766340971,
"learning_rate": 5.9516653334131015e-05,
"loss": 0.0002,
"step": 619
},
{
"epoch": 0.4604530263646491,
"grad_norm": 0.011299900710582733,
"learning_rate": 5.9397637720005595e-05,
"loss": 0.0003,
"step": 620
},
{
"epoch": 0.46119569253620496,
"grad_norm": 0.001197346136905253,
"learning_rate": 5.9278566884376474e-05,
"loss": 0.0002,
"step": 621
},
{
"epoch": 0.46193835870776084,
"grad_norm": 0.0022983471862971783,
"learning_rate": 5.915944152691634e-05,
"loss": 0.0002,
"step": 622
},
{
"epoch": 0.4626810248793167,
"grad_norm": 0.001483081839978695,
"learning_rate": 5.904026234761827e-05,
"loss": 0.0001,
"step": 623
},
{
"epoch": 0.4634236910508726,
"grad_norm": 0.0636589452624321,
"learning_rate": 5.8921030046791614e-05,
"loss": 0.0008,
"step": 624
},
{
"epoch": 0.4641663572224285,
"grad_norm": 0.0022622577380388975,
"learning_rate": 5.880174532505786e-05,
"loss": 0.0002,
"step": 625
},
{
"epoch": 0.4649090233939844,
"grad_norm": 0.006052135024219751,
"learning_rate": 5.868240888334653e-05,
"loss": 0.0004,
"step": 626
},
{
"epoch": 0.4656516895655403,
"grad_norm": 0.0013821636093780398,
"learning_rate": 5.856302142289105e-05,
"loss": 0.0001,
"step": 627
},
{
"epoch": 0.4663943557370962,
"grad_norm": 0.0037645832635462284,
"learning_rate": 5.8443583645224655e-05,
"loss": 0.0002,
"step": 628
},
{
"epoch": 0.4671370219086521,
"grad_norm": 0.0014773118309676647,
"learning_rate": 5.832409625217623e-05,
"loss": 0.0002,
"step": 629
},
{
"epoch": 0.46787968808020797,
"grad_norm": 0.0014741544146090746,
"learning_rate": 5.820455994586621e-05,
"loss": 0.0001,
"step": 630
},
{
"epoch": 0.46862235425176385,
"grad_norm": 0.0006976615404710174,
"learning_rate": 5.808497542870246e-05,
"loss": 0.0001,
"step": 631
},
{
"epoch": 0.46936502042331973,
"grad_norm": 1.5406405925750732,
"learning_rate": 5.796534340337614e-05,
"loss": 0.0059,
"step": 632
},
{
"epoch": 0.4701076865948756,
"grad_norm": 0.0043447320349514484,
"learning_rate": 5.784566457285754e-05,
"loss": 0.0002,
"step": 633
},
{
"epoch": 0.4708503527664315,
"grad_norm": 0.007482700981199741,
"learning_rate": 5.772593964039203e-05,
"loss": 0.0003,
"step": 634
},
{
"epoch": 0.4715930189379874,
"grad_norm": 0.0010534449247643352,
"learning_rate": 5.7606169309495836e-05,
"loss": 0.0002,
"step": 635
},
{
"epoch": 0.47233568510954327,
"grad_norm": 0.0039465283043682575,
"learning_rate": 5.748635428395199e-05,
"loss": 0.0002,
"step": 636
},
{
"epoch": 0.47307835128109915,
"grad_norm": 0.0014873096952214837,
"learning_rate": 5.736649526780611e-05,
"loss": 0.0001,
"step": 637
},
{
"epoch": 0.47382101745265504,
"grad_norm": 0.0004832752456422895,
"learning_rate": 5.724659296536233e-05,
"loss": 0.0001,
"step": 638
},
{
"epoch": 0.4745636836242109,
"grad_norm": 0.00103551906067878,
"learning_rate": 5.712664808117918e-05,
"loss": 0.0001,
"step": 639
},
{
"epoch": 0.4753063497957668,
"grad_norm": 0.000814305676613003,
"learning_rate": 5.7006661320065315e-05,
"loss": 0.0001,
"step": 640
},
{
"epoch": 0.4760490159673227,
"grad_norm": 0.0019791568629443645,
"learning_rate": 5.688663338707554e-05,
"loss": 0.0003,
"step": 641
},
{
"epoch": 0.47679168213887857,
"grad_norm": 0.005826095584779978,
"learning_rate": 5.6766564987506566e-05,
"loss": 0.0002,
"step": 642
},
{
"epoch": 0.47753434831043445,
"grad_norm": 0.0011684205383062363,
"learning_rate": 5.664645682689287e-05,
"loss": 0.0001,
"step": 643
},
{
"epoch": 0.47827701448199034,
"grad_norm": 0.0011006612330675125,
"learning_rate": 5.6526309611002594e-05,
"loss": 0.0001,
"step": 644
},
{
"epoch": 0.4790196806535462,
"grad_norm": 0.0013071649009361863,
"learning_rate": 5.640612404583337e-05,
"loss": 0.0001,
"step": 645
},
{
"epoch": 0.4797623468251021,
"grad_norm": 0.0031639167573302984,
"learning_rate": 5.628590083760814e-05,
"loss": 0.0002,
"step": 646
},
{
"epoch": 0.480505012996658,
"grad_norm": 0.008663011714816093,
"learning_rate": 5.61656406927711e-05,
"loss": 0.0005,
"step": 647
},
{
"epoch": 0.48124767916821387,
"grad_norm": 0.004144098609685898,
"learning_rate": 5.604534431798347e-05,
"loss": 0.0002,
"step": 648
},
{
"epoch": 0.48199034533976975,
"grad_norm": 0.1093854233622551,
"learning_rate": 5.5925012420119326e-05,
"loss": 0.0024,
"step": 649
},
{
"epoch": 0.48273301151132564,
"grad_norm": 0.0017107095336541533,
"learning_rate": 5.5804645706261514e-05,
"loss": 0.0002,
"step": 650
},
{
"epoch": 0.4834756776828815,
"grad_norm": 1.8285502195358276,
"learning_rate": 5.568424488369746e-05,
"loss": 0.0136,
"step": 651
},
{
"epoch": 0.4842183438544374,
"grad_norm": 0.00895615667104721,
"learning_rate": 5.556381065991499e-05,
"loss": 0.0003,
"step": 652
},
{
"epoch": 0.4849610100259933,
"grad_norm": 0.02741318568587303,
"learning_rate": 5.544334374259823e-05,
"loss": 0.0003,
"step": 653
},
{
"epoch": 0.48570367619754923,
"grad_norm": 0.007290950510650873,
"learning_rate": 5.532284483962341e-05,
"loss": 0.0003,
"step": 654
},
{
"epoch": 0.4864463423691051,
"grad_norm": 0.008065281435847282,
"learning_rate": 5.520231465905471e-05,
"loss": 0.0002,
"step": 655
},
{
"epoch": 0.487189008540661,
"grad_norm": 0.1876053661108017,
"learning_rate": 5.5081753909140096e-05,
"loss": 0.001,
"step": 656
},
{
"epoch": 0.4879316747122169,
"grad_norm": 0.00261944648809731,
"learning_rate": 5.496116329830716e-05,
"loss": 0.0002,
"step": 657
},
{
"epoch": 0.48867434088377276,
"grad_norm": 0.00967632420361042,
"learning_rate": 5.484054353515896e-05,
"loss": 0.0002,
"step": 658
},
{
"epoch": 0.48941700705532865,
"grad_norm": 0.002796781715005636,
"learning_rate": 5.4719895328469874e-05,
"loss": 0.0002,
"step": 659
},
{
"epoch": 0.49015967322688453,
"grad_norm": 0.5190077424049377,
"learning_rate": 5.459921938718138e-05,
"loss": 0.0018,
"step": 660
},
{
"epoch": 0.4909023393984404,
"grad_norm": 0.00399174727499485,
"learning_rate": 5.447851642039798e-05,
"loss": 0.0002,
"step": 661
},
{
"epoch": 0.4916450055699963,
"grad_norm": 0.0023514707572758198,
"learning_rate": 5.435778713738292e-05,
"loss": 0.0001,
"step": 662
},
{
"epoch": 0.4923876717415522,
"grad_norm": 0.006780529860407114,
"learning_rate": 5.423703224755413e-05,
"loss": 0.0002,
"step": 663
},
{
"epoch": 0.49313033791310806,
"grad_norm": 2.0502431392669678,
"learning_rate": 5.411625246048e-05,
"loss": 0.0082,
"step": 664
},
{
"epoch": 0.49387300408466395,
"grad_norm": 0.0009503086330369115,
"learning_rate": 5.39954484858752e-05,
"loss": 0.0001,
"step": 665
},
{
"epoch": 0.49461567025621983,
"grad_norm": 0.00197896221652627,
"learning_rate": 5.387462103359655e-05,
"loss": 0.0001,
"step": 666
},
{
"epoch": 0.4953583364277757,
"grad_norm": 0.0009312194306403399,
"learning_rate": 5.3753770813638814e-05,
"loss": 0.0001,
"step": 667
},
{
"epoch": 0.4961010025993316,
"grad_norm": 0.0009839091217145324,
"learning_rate": 5.363289853613054e-05,
"loss": 0.0001,
"step": 668
},
{
"epoch": 0.4968436687708875,
"grad_norm": 0.0011252929689362645,
"learning_rate": 5.351200491132988e-05,
"loss": 0.0001,
"step": 669
},
{
"epoch": 0.49758633494244336,
"grad_norm": 0.0006150620174594223,
"learning_rate": 5.339109064962047e-05,
"loss": 0.0001,
"step": 670
},
{
"epoch": 0.49832900111399925,
"grad_norm": 0.0013650684850290418,
"learning_rate": 5.327015646150716e-05,
"loss": 0.0001,
"step": 671
},
{
"epoch": 0.49907166728555513,
"grad_norm": 0.0012382761342450976,
"learning_rate": 5.3149203057611906e-05,
"loss": 0.0001,
"step": 672
},
{
"epoch": 0.499814333457111,
"grad_norm": 0.0007622497505508363,
"learning_rate": 5.302823114866958e-05,
"loss": 0.0001,
"step": 673
},
{
"epoch": 0.500556999628667,
"grad_norm": 0.0008362553780898452,
"learning_rate": 5.290724144552379e-05,
"loss": 0.0001,
"step": 674
},
{
"epoch": 0.500556999628667,
"eval_loss": 0.00016636255895718932,
"eval_runtime": 190.6115,
"eval_samples_per_second": 5.949,
"eval_steps_per_second": 2.975,
"step": 674
},
{
"epoch": 0.5012996658002228,
"grad_norm": 0.0007262330618686974,
"learning_rate": 5.2786234659122726e-05,
"loss": 0.0001,
"step": 675
},
{
"epoch": 0.5020423319717787,
"grad_norm": 0.007732240483164787,
"learning_rate": 5.266521150051492e-05,
"loss": 0.0003,
"step": 676
},
{
"epoch": 0.5027849981433345,
"grad_norm": 0.0026611273642629385,
"learning_rate": 5.254417268084514e-05,
"loss": 0.0001,
"step": 677
},
{
"epoch": 0.5035276643148905,
"grad_norm": 0.00148585916031152,
"learning_rate": 5.242311891135016e-05,
"loss": 0.0002,
"step": 678
},
{
"epoch": 0.5042703304864463,
"grad_norm": 0.0006069787195883691,
"learning_rate": 5.2302050903354606e-05,
"loss": 0.0001,
"step": 679
},
{
"epoch": 0.5050129966580023,
"grad_norm": 0.002375812502577901,
"learning_rate": 5.218096936826681e-05,
"loss": 0.0002,
"step": 680
},
{
"epoch": 0.5057556628295581,
"grad_norm": 0.003138911910355091,
"learning_rate": 5.205987501757452e-05,
"loss": 0.0002,
"step": 681
},
{
"epoch": 0.506498329001114,
"grad_norm": 0.001015645801089704,
"learning_rate": 5.193876856284085e-05,
"loss": 0.0001,
"step": 682
},
{
"epoch": 0.5072409951726699,
"grad_norm": 0.005205164197832346,
"learning_rate": 5.1817650715699994e-05,
"loss": 0.0002,
"step": 683
},
{
"epoch": 0.5079836613442258,
"grad_norm": 0.0015316219069063663,
"learning_rate": 5.1696522187853126e-05,
"loss": 0.0002,
"step": 684
},
{
"epoch": 0.5087263275157816,
"grad_norm": 0.0026633795350790024,
"learning_rate": 5.157538369106414e-05,
"loss": 0.0002,
"step": 685
},
{
"epoch": 0.5094689936873376,
"grad_norm": 0.0016374588012695312,
"learning_rate": 5.145423593715557e-05,
"loss": 0.0001,
"step": 686
},
{
"epoch": 0.5102116598588934,
"grad_norm": 0.002312192926183343,
"learning_rate": 5.133307963800429e-05,
"loss": 0.0002,
"step": 687
},
{
"epoch": 0.5109543260304493,
"grad_norm": 0.004305702168494463,
"learning_rate": 5.121191550553741e-05,
"loss": 0.0002,
"step": 688
},
{
"epoch": 0.5116969922020052,
"grad_norm": 0.005227786023169756,
"learning_rate": 5.1090744251728064e-05,
"loss": 0.0002,
"step": 689
},
{
"epoch": 0.5124396583735611,
"grad_norm": 0.0004952704184688628,
"learning_rate": 5.096956658859122e-05,
"loss": 0.0001,
"step": 690
},
{
"epoch": 0.5131823245451169,
"grad_norm": 0.0006610776763409376,
"learning_rate": 5.084838322817954e-05,
"loss": 0.0001,
"step": 691
},
{
"epoch": 0.5139249907166729,
"grad_norm": 0.002141596982255578,
"learning_rate": 5.072719488257914e-05,
"loss": 0.0002,
"step": 692
},
{
"epoch": 0.5146676568882287,
"grad_norm": 0.0008098665275610983,
"learning_rate": 5.060600226390543e-05,
"loss": 0.0001,
"step": 693
},
{
"epoch": 0.5154103230597846,
"grad_norm": 0.0018381529953330755,
"learning_rate": 5.048480608429893e-05,
"loss": 0.0001,
"step": 694
},
{
"epoch": 0.5161529892313406,
"grad_norm": 0.0011757278116419911,
"learning_rate": 5.0363607055921094e-05,
"loss": 0.0002,
"step": 695
},
{
"epoch": 0.5168956554028964,
"grad_norm": 0.00043161376379430294,
"learning_rate": 5.0242405890950093e-05,
"loss": 0.0001,
"step": 696
},
{
"epoch": 0.5176383215744523,
"grad_norm": 0.0008273344719782472,
"learning_rate": 5.01212033015767e-05,
"loss": 0.0001,
"step": 697
},
{
"epoch": 0.5183809877460082,
"grad_norm": 0.0035381957422941923,
"learning_rate": 5e-05,
"loss": 0.0002,
"step": 698
},
{
"epoch": 0.5191236539175641,
"grad_norm": 0.0015232819132506847,
"learning_rate": 4.9878796698423324e-05,
"loss": 0.0001,
"step": 699
},
{
"epoch": 0.5198663200891199,
"grad_norm": 0.0007589098531752825,
"learning_rate": 4.975759410904992e-05,
"loss": 0.0001,
"step": 700
},
{
"epoch": 0.5206089862606759,
"grad_norm": 0.0017526590963825583,
"learning_rate": 4.963639294407893e-05,
"loss": 0.0001,
"step": 701
},
{
"epoch": 0.5213516524322317,
"grad_norm": 0.0024000522680580616,
"learning_rate": 4.951519391570108e-05,
"loss": 0.0001,
"step": 702
},
{
"epoch": 0.5220943186037876,
"grad_norm": 0.027467429637908936,
"learning_rate": 4.9393997736094596e-05,
"loss": 0.0002,
"step": 703
},
{
"epoch": 0.5228369847753435,
"grad_norm": 0.000491057347971946,
"learning_rate": 4.9272805117420865e-05,
"loss": 0.0001,
"step": 704
},
{
"epoch": 0.5235796509468994,
"grad_norm": 0.0012586608063429594,
"learning_rate": 4.915161677182048e-05,
"loss": 0.0001,
"step": 705
},
{
"epoch": 0.5243223171184552,
"grad_norm": 0.0025400435552001,
"learning_rate": 4.903043341140879e-05,
"loss": 0.0001,
"step": 706
},
{
"epoch": 0.5250649832900112,
"grad_norm": 0.0027627914678305387,
"learning_rate": 4.890925574827195e-05,
"loss": 0.0002,
"step": 707
},
{
"epoch": 0.525807649461567,
"grad_norm": 0.0010417604353278875,
"learning_rate": 4.878808449446259e-05,
"loss": 0.0001,
"step": 708
},
{
"epoch": 0.5265503156331229,
"grad_norm": 0.0006638254853896797,
"learning_rate": 4.866692036199572e-05,
"loss": 0.0001,
"step": 709
},
{
"epoch": 0.5272929818046788,
"grad_norm": 0.0013286214089021087,
"learning_rate": 4.854576406284443e-05,
"loss": 0.0001,
"step": 710
},
{
"epoch": 0.5280356479762347,
"grad_norm": 0.004056660924106836,
"learning_rate": 4.8424616308935875e-05,
"loss": 0.0001,
"step": 711
},
{
"epoch": 0.5287783141477905,
"grad_norm": 0.0007322711171582341,
"learning_rate": 4.830347781214689e-05,
"loss": 0.0001,
"step": 712
},
{
"epoch": 0.5295209803193465,
"grad_norm": 0.004066810943186283,
"learning_rate": 4.8182349284300024e-05,
"loss": 0.0002,
"step": 713
},
{
"epoch": 0.5302636464909023,
"grad_norm": 0.0007455990416929126,
"learning_rate": 4.806123143715916e-05,
"loss": 0.0001,
"step": 714
},
{
"epoch": 0.5310063126624582,
"grad_norm": 0.0005849769804626703,
"learning_rate": 4.7940124982425497e-05,
"loss": 0.0001,
"step": 715
},
{
"epoch": 0.5317489788340141,
"grad_norm": 0.0005887853913009167,
"learning_rate": 4.781903063173321e-05,
"loss": 0.0001,
"step": 716
},
{
"epoch": 0.53249164500557,
"grad_norm": 0.0007402475457638502,
"learning_rate": 4.76979490966454e-05,
"loss": 0.0001,
"step": 717
},
{
"epoch": 0.5332343111771258,
"grad_norm": 0.0029941131360828876,
"learning_rate": 4.757688108864986e-05,
"loss": 0.0002,
"step": 718
},
{
"epoch": 0.5339769773486818,
"grad_norm": 0.0025099741760641336,
"learning_rate": 4.7455827319154873e-05,
"loss": 0.0002,
"step": 719
},
{
"epoch": 0.5347196435202377,
"grad_norm": 0.0005227324436418712,
"learning_rate": 4.73347884994851e-05,
"loss": 0.0001,
"step": 720
},
{
"epoch": 0.5354623096917935,
"grad_norm": 0.0016509697306901217,
"learning_rate": 4.7213765340877286e-05,
"loss": 0.0001,
"step": 721
},
{
"epoch": 0.5362049758633495,
"grad_norm": 0.0011602011509239674,
"learning_rate": 4.709275855447621e-05,
"loss": 0.0001,
"step": 722
},
{
"epoch": 0.5369476420349053,
"grad_norm": 0.0008615905535407364,
"learning_rate": 4.697176885133042e-05,
"loss": 0.0001,
"step": 723
},
{
"epoch": 0.5376903082064612,
"grad_norm": 0.005168153438717127,
"learning_rate": 4.6850796942388106e-05,
"loss": 0.0002,
"step": 724
},
{
"epoch": 0.5384329743780171,
"grad_norm": 0.0014478538651019335,
"learning_rate": 4.6729843538492847e-05,
"loss": 0.0001,
"step": 725
},
{
"epoch": 0.539175640549573,
"grad_norm": 0.8397618532180786,
"learning_rate": 4.660890935037954e-05,
"loss": 0.013,
"step": 726
},
{
"epoch": 0.5399183067211288,
"grad_norm": 0.0026152590289711952,
"learning_rate": 4.648799508867012e-05,
"loss": 0.0002,
"step": 727
},
{
"epoch": 0.5406609728926848,
"grad_norm": 0.007058785296976566,
"learning_rate": 4.636710146386947e-05,
"loss": 0.0002,
"step": 728
},
{
"epoch": 0.5414036390642406,
"grad_norm": 0.00043141836067661643,
"learning_rate": 4.6246229186361184e-05,
"loss": 0.0001,
"step": 729
},
{
"epoch": 0.5421463052357965,
"grad_norm": 0.0015156837180256844,
"learning_rate": 4.612537896640346e-05,
"loss": 0.0002,
"step": 730
},
{
"epoch": 0.5428889714073524,
"grad_norm": 0.0005109444609843194,
"learning_rate": 4.600455151412482e-05,
"loss": 0.0001,
"step": 731
},
{
"epoch": 0.5436316375789083,
"grad_norm": 0.0052424585446715355,
"learning_rate": 4.588374753952001e-05,
"loss": 0.0003,
"step": 732
},
{
"epoch": 0.5443743037504641,
"grad_norm": 0.0008050160249695182,
"learning_rate": 4.576296775244588e-05,
"loss": 0.0001,
"step": 733
},
{
"epoch": 0.5451169699220201,
"grad_norm": 0.0020469501614570618,
"learning_rate": 4.564221286261709e-05,
"loss": 0.0002,
"step": 734
},
{
"epoch": 0.5458596360935759,
"grad_norm": 0.000345387845300138,
"learning_rate": 4.552148357960205e-05,
"loss": 0.0001,
"step": 735
},
{
"epoch": 0.5466023022651318,
"grad_norm": 0.002280315151438117,
"learning_rate": 4.5400780612818626e-05,
"loss": 0.0002,
"step": 736
},
{
"epoch": 0.5473449684366877,
"grad_norm": 0.0018161894986405969,
"learning_rate": 4.528010467153016e-05,
"loss": 0.0002,
"step": 737
},
{
"epoch": 0.5480876346082436,
"grad_norm": 0.0008220197050832212,
"learning_rate": 4.515945646484105e-05,
"loss": 0.0001,
"step": 738
},
{
"epoch": 0.5488303007797994,
"grad_norm": 0.000890806782990694,
"learning_rate": 4.503883670169285e-05,
"loss": 0.0001,
"step": 739
},
{
"epoch": 0.5495729669513554,
"grad_norm": 0.0015704066026955843,
"learning_rate": 4.491824609085991e-05,
"loss": 0.0002,
"step": 740
},
{
"epoch": 0.5503156331229112,
"grad_norm": 0.003599775955080986,
"learning_rate": 4.4797685340945295e-05,
"loss": 0.0002,
"step": 741
},
{
"epoch": 0.5510582992944671,
"grad_norm": 0.0044060661457479,
"learning_rate": 4.467715516037659e-05,
"loss": 0.0002,
"step": 742
},
{
"epoch": 0.551800965466023,
"grad_norm": 0.0007429459365084767,
"learning_rate": 4.4556656257401786e-05,
"loss": 0.0001,
"step": 743
},
{
"epoch": 0.5525436316375789,
"grad_norm": 0.0007204132853075862,
"learning_rate": 4.443618934008502e-05,
"loss": 0.0001,
"step": 744
},
{
"epoch": 0.5532862978091347,
"grad_norm": 0.05012976750731468,
"learning_rate": 4.4315755116302554e-05,
"loss": 0.0007,
"step": 745
},
{
"epoch": 0.5540289639806907,
"grad_norm": 0.0010998403886333108,
"learning_rate": 4.4195354293738484e-05,
"loss": 0.0001,
"step": 746
},
{
"epoch": 0.5547716301522466,
"grad_norm": 0.0022817838471382856,
"learning_rate": 4.4074987579880685e-05,
"loss": 0.0002,
"step": 747
},
{
"epoch": 0.5555142963238024,
"grad_norm": 0.009643745608627796,
"learning_rate": 4.395465568201655e-05,
"loss": 0.0005,
"step": 748
},
{
"epoch": 0.5562569624953584,
"grad_norm": 0.0010438463650643826,
"learning_rate": 4.38343593072289e-05,
"loss": 0.0001,
"step": 749
},
{
"epoch": 0.5569996286669142,
"grad_norm": 0.0021892802324146032,
"learning_rate": 4.3714099162391875e-05,
"loss": 0.0001,
"step": 750
},
{
"epoch": 0.5577422948384702,
"grad_norm": 0.04245134815573692,
"learning_rate": 4.359387595416665e-05,
"loss": 0.001,
"step": 751
},
{
"epoch": 0.558484961010026,
"grad_norm": 0.001306456862948835,
"learning_rate": 4.347369038899744e-05,
"loss": 0.0001,
"step": 752
},
{
"epoch": 0.5592276271815819,
"grad_norm": 0.0017448252765461802,
"learning_rate": 4.335354317310715e-05,
"loss": 0.0001,
"step": 753
},
{
"epoch": 0.5599702933531377,
"grad_norm": 0.014149321243166924,
"learning_rate": 4.323343501249346e-05,
"loss": 0.0002,
"step": 754
},
{
"epoch": 0.5607129595246937,
"grad_norm": 0.0010777488350868225,
"learning_rate": 4.3113366612924465e-05,
"loss": 0.0001,
"step": 755
},
{
"epoch": 0.5614556256962495,
"grad_norm": 0.0005878026131540537,
"learning_rate": 4.299333867993469e-05,
"loss": 0.0001,
"step": 756
},
{
"epoch": 0.5621982918678055,
"grad_norm": 0.004690335597842932,
"learning_rate": 4.287335191882083e-05,
"loss": 0.0003,
"step": 757
},
{
"epoch": 0.5629409580393613,
"grad_norm": 0.002779589965939522,
"learning_rate": 4.275340703463767e-05,
"loss": 0.0002,
"step": 758
},
{
"epoch": 0.5636836242109172,
"grad_norm": 0.01008251216262579,
"learning_rate": 4.2633504732193896e-05,
"loss": 0.0003,
"step": 759
},
{
"epoch": 0.564426290382473,
"grad_norm": 0.0012684455141425133,
"learning_rate": 4.2513645716048025e-05,
"loss": 0.0002,
"step": 760
},
{
"epoch": 0.565168956554029,
"grad_norm": 0.0006100367172621191,
"learning_rate": 4.239383069050417e-05,
"loss": 0.0001,
"step": 761
},
{
"epoch": 0.5659116227255848,
"grad_norm": 0.0015486053889617324,
"learning_rate": 4.227406035960798e-05,
"loss": 0.0001,
"step": 762
},
{
"epoch": 0.5666542888971408,
"grad_norm": 0.0052903578616678715,
"learning_rate": 4.215433542714248e-05,
"loss": 0.0003,
"step": 763
},
{
"epoch": 0.5673969550686966,
"grad_norm": 0.005356297362595797,
"learning_rate": 4.203465659662388e-05,
"loss": 0.0002,
"step": 764
},
{
"epoch": 0.5681396212402525,
"grad_norm": 0.003509636502712965,
"learning_rate": 4.191502457129756e-05,
"loss": 0.0001,
"step": 765
},
{
"epoch": 0.5688822874118084,
"grad_norm": 0.013686946593225002,
"learning_rate": 4.17954400541338e-05,
"loss": 0.0004,
"step": 766
},
{
"epoch": 0.5696249535833643,
"grad_norm": 0.0035359973553568125,
"learning_rate": 4.16759037478238e-05,
"loss": 0.0002,
"step": 767
},
{
"epoch": 0.5703676197549201,
"grad_norm": 0.0006897081038914621,
"learning_rate": 4.1556416354775364e-05,
"loss": 0.0001,
"step": 768
},
{
"epoch": 0.5711102859264761,
"grad_norm": 0.000840733468066901,
"learning_rate": 4.1436978577108975e-05,
"loss": 0.0001,
"step": 769
},
{
"epoch": 0.5718529520980319,
"grad_norm": 0.001890829997137189,
"learning_rate": 4.131759111665349e-05,
"loss": 0.0001,
"step": 770
},
{
"epoch": 0.5725956182695878,
"grad_norm": 0.006861279718577862,
"learning_rate": 4.1198254674942154e-05,
"loss": 0.0003,
"step": 771
},
{
"epoch": 0.5733382844411437,
"grad_norm": 0.020367255434393883,
"learning_rate": 4.107896995320839e-05,
"loss": 0.0001,
"step": 772
},
{
"epoch": 0.5740809506126996,
"grad_norm": 0.0010303305462002754,
"learning_rate": 4.095973765238175e-05,
"loss": 0.0001,
"step": 773
},
{
"epoch": 0.5748236167842555,
"grad_norm": 0.0006230950239114463,
"learning_rate": 4.0840558473083664e-05,
"loss": 0.0001,
"step": 774
},
{
"epoch": 0.5755662829558114,
"grad_norm": 0.06197100505232811,
"learning_rate": 4.0721433115623544e-05,
"loss": 0.0016,
"step": 775
},
{
"epoch": 0.5763089491273673,
"grad_norm": 0.015006426721811295,
"learning_rate": 4.060236227999441e-05,
"loss": 0.0003,
"step": 776
},
{
"epoch": 0.5770516152989231,
"grad_norm": 0.0008427058928646147,
"learning_rate": 4.0483346665869e-05,
"loss": 0.0001,
"step": 777
},
{
"epoch": 0.5777942814704791,
"grad_norm": 0.0018404822330921888,
"learning_rate": 4.036438697259551e-05,
"loss": 0.0002,
"step": 778
},
{
"epoch": 0.5785369476420349,
"grad_norm": 0.0025395648553967476,
"learning_rate": 4.0245483899193595e-05,
"loss": 0.0001,
"step": 779
},
{
"epoch": 0.5792796138135908,
"grad_norm": 0.0011295550502836704,
"learning_rate": 4.012663814435014e-05,
"loss": 0.0001,
"step": 780
},
{
"epoch": 0.5800222799851467,
"grad_norm": 0.0014332979917526245,
"learning_rate": 4.0007850406415234e-05,
"loss": 0.0002,
"step": 781
},
{
"epoch": 0.5807649461567026,
"grad_norm": 0.0006570751429535449,
"learning_rate": 3.9889121383398115e-05,
"loss": 0.0001,
"step": 782
},
{
"epoch": 0.5815076123282584,
"grad_norm": 0.0014288886450231075,
"learning_rate": 3.977045177296291e-05,
"loss": 0.0002,
"step": 783
},
{
"epoch": 0.5822502784998144,
"grad_norm": 0.000917198951356113,
"learning_rate": 3.965184227242474e-05,
"loss": 0.0001,
"step": 784
},
{
"epoch": 0.5829929446713702,
"grad_norm": 0.002546856412664056,
"learning_rate": 3.95332935787454e-05,
"loss": 0.0001,
"step": 785
},
{
"epoch": 0.5837356108429261,
"grad_norm": 0.0009835307719185948,
"learning_rate": 3.941480638852948e-05,
"loss": 0.0001,
"step": 786
},
{
"epoch": 0.584478277014482,
"grad_norm": 0.10560861229896545,
"learning_rate": 3.929638139802007e-05,
"loss": 0.0006,
"step": 787
},
{
"epoch": 0.5852209431860379,
"grad_norm": 0.0006187596009112895,
"learning_rate": 3.917801930309486e-05,
"loss": 0.0001,
"step": 788
},
{
"epoch": 0.5859636093575937,
"grad_norm": 0.027732783928513527,
"learning_rate": 3.90597207992619e-05,
"loss": 0.0005,
"step": 789
},
{
"epoch": 0.5867062755291497,
"grad_norm": 0.003759243292734027,
"learning_rate": 3.8941486581655615e-05,
"loss": 0.0002,
"step": 790
},
{
"epoch": 0.5874489417007055,
"grad_norm": 0.002819777699187398,
"learning_rate": 3.882331734503263e-05,
"loss": 0.0001,
"step": 791
},
{
"epoch": 0.5881916078722614,
"grad_norm": 0.0007966597331687808,
"learning_rate": 3.870521378376777e-05,
"loss": 0.0001,
"step": 792
},
{
"epoch": 0.5889342740438173,
"grad_norm": 0.0005825799889862537,
"learning_rate": 3.8587176591849916e-05,
"loss": 0.0001,
"step": 793
},
{
"epoch": 0.5896769402153732,
"grad_norm": 0.002244536532089114,
"learning_rate": 3.846920646287799e-05,
"loss": 0.0001,
"step": 794
},
{
"epoch": 0.590419606386929,
"grad_norm": 0.002780564595013857,
"learning_rate": 3.8351304090056825e-05,
"loss": 0.0002,
"step": 795
},
{
"epoch": 0.591162272558485,
"grad_norm": 0.0009447059710510075,
"learning_rate": 3.8233470166193126e-05,
"loss": 0.0001,
"step": 796
},
{
"epoch": 0.5919049387300408,
"grad_norm": 0.001110229641199112,
"learning_rate": 3.8115705383691355e-05,
"loss": 0.0001,
"step": 797
},
{
"epoch": 0.5926476049015967,
"grad_norm": 0.0013889704132452607,
"learning_rate": 3.799801043454971e-05,
"loss": 0.0001,
"step": 798
},
{
"epoch": 0.5933902710731526,
"grad_norm": 0.0008447144064120948,
"learning_rate": 3.7880386010356054e-05,
"loss": 0.0001,
"step": 799
},
{
"epoch": 0.5941329372447085,
"grad_norm": 0.003610602580010891,
"learning_rate": 3.776283280228381e-05,
"loss": 0.0001,
"step": 800
},
{
"epoch": 0.5948756034162644,
"grad_norm": 0.005582943558692932,
"learning_rate": 3.764535150108798e-05,
"loss": 0.0003,
"step": 801
},
{
"epoch": 0.5956182695878203,
"grad_norm": 0.003268187865614891,
"learning_rate": 3.752794279710094e-05,
"loss": 0.0002,
"step": 802
},
{
"epoch": 0.5963609357593762,
"grad_norm": 0.0018831202760338783,
"learning_rate": 3.741060738022858e-05,
"loss": 0.0002,
"step": 803
},
{
"epoch": 0.597103601930932,
"grad_norm": 0.0007239828119054437,
"learning_rate": 3.7293345939946065e-05,
"loss": 0.0001,
"step": 804
},
{
"epoch": 0.597846268102488,
"grad_norm": 0.21732911467552185,
"learning_rate": 3.717615916529395e-05,
"loss": 0.0017,
"step": 805
},
{
"epoch": 0.5985889342740438,
"grad_norm": 0.00127478688955307,
"learning_rate": 3.705904774487396e-05,
"loss": 0.0002,
"step": 806
},
{
"epoch": 0.5993316004455997,
"grad_norm": 0.0045968894846737385,
"learning_rate": 3.6942012366845096e-05,
"loss": 0.0001,
"step": 807
},
{
"epoch": 0.6000742666171556,
"grad_norm": 0.001871331944130361,
"learning_rate": 3.6825053718919464e-05,
"loss": 0.0002,
"step": 808
},
{
"epoch": 0.6008169327887115,
"grad_norm": 0.0010791551321744919,
"learning_rate": 3.670817248835836e-05,
"loss": 0.0001,
"step": 809
},
{
"epoch": 0.6015595989602673,
"grad_norm": 0.0006027501658536494,
"learning_rate": 3.6591369361968124e-05,
"loss": 0.0001,
"step": 810
},
{
"epoch": 0.6023022651318233,
"grad_norm": 0.0032688211649656296,
"learning_rate": 3.6474645026096185e-05,
"loss": 0.0002,
"step": 811
},
{
"epoch": 0.6030449313033791,
"grad_norm": 0.00113378232344985,
"learning_rate": 3.6358000166626966e-05,
"loss": 0.0001,
"step": 812
},
{
"epoch": 0.603787597474935,
"grad_norm": 0.0009587003150954843,
"learning_rate": 3.624143546897784e-05,
"loss": 0.0001,
"step": 813
},
{
"epoch": 0.6045302636464909,
"grad_norm": 0.0017409041756764054,
"learning_rate": 3.612495161809522e-05,
"loss": 0.0001,
"step": 814
},
{
"epoch": 0.6052729298180468,
"grad_norm": 0.0004892799770459533,
"learning_rate": 3.60085492984504e-05,
"loss": 0.0001,
"step": 815
},
{
"epoch": 0.6060155959896026,
"grad_norm": 0.0016567412531003356,
"learning_rate": 3.589222919403562e-05,
"loss": 0.0002,
"step": 816
},
{
"epoch": 0.6067582621611586,
"grad_norm": 0.0016312827356159687,
"learning_rate": 3.577599198835998e-05,
"loss": 0.0001,
"step": 817
},
{
"epoch": 0.6075009283327144,
"grad_norm": 0.0007810606039129198,
"learning_rate": 3.5659838364445505e-05,
"loss": 0.0001,
"step": 818
},
{
"epoch": 0.6082435945042703,
"grad_norm": 0.0013547971611842513,
"learning_rate": 3.554376900482301e-05,
"loss": 0.0001,
"step": 819
},
{
"epoch": 0.6089862606758262,
"grad_norm": 0.0006009297212585807,
"learning_rate": 3.542778459152826e-05,
"loss": 0.0001,
"step": 820
},
{
"epoch": 0.6097289268473821,
"grad_norm": 0.0010081218788400292,
"learning_rate": 3.531188580609778e-05,
"loss": 0.0001,
"step": 821
},
{
"epoch": 0.6104715930189379,
"grad_norm": 0.00042164497426711023,
"learning_rate": 3.5196073329565015e-05,
"loss": 0.0001,
"step": 822
},
{
"epoch": 0.6112142591904939,
"grad_norm": 0.0007461085333488882,
"learning_rate": 3.508034784245616e-05,
"loss": 0.0001,
"step": 823
},
{
"epoch": 0.6119569253620497,
"grad_norm": 1.3910738229751587,
"learning_rate": 3.4964710024786354e-05,
"loss": 0.0984,
"step": 824
},
{
"epoch": 0.6126995915336056,
"grad_norm": 0.0007660177652724087,
"learning_rate": 3.484916055605548e-05,
"loss": 0.0001,
"step": 825
},
{
"epoch": 0.6134422577051615,
"grad_norm": 0.07438488304615021,
"learning_rate": 3.473370011524435e-05,
"loss": 0.0012,
"step": 826
},
{
"epoch": 0.6141849238767174,
"grad_norm": 0.0015772825572639704,
"learning_rate": 3.461832938081059e-05,
"loss": 0.0002,
"step": 827
},
{
"epoch": 0.6149275900482734,
"grad_norm": 0.0016182976542040706,
"learning_rate": 3.450304903068475e-05,
"loss": 0.0001,
"step": 828
},
{
"epoch": 0.6156702562198292,
"grad_norm": 0.0015479021240025759,
"learning_rate": 3.4387859742266225e-05,
"loss": 0.0001,
"step": 829
},
{
"epoch": 0.6164129223913851,
"grad_norm": 0.00273375422693789,
"learning_rate": 3.427276219241933e-05,
"loss": 0.0002,
"step": 830
},
{
"epoch": 0.617155588562941,
"grad_norm": 0.001004122314043343,
"learning_rate": 3.415775705746934e-05,
"loss": 0.0001,
"step": 831
},
{
"epoch": 0.6178982547344969,
"grad_norm": 0.008125795051455498,
"learning_rate": 3.4042845013198456e-05,
"loss": 0.0002,
"step": 832
},
{
"epoch": 0.6186409209060527,
"grad_norm": 0.13623641431331635,
"learning_rate": 3.392802673484193e-05,
"loss": 0.0018,
"step": 833
},
{
"epoch": 0.6193835870776087,
"grad_norm": 0.000562251138035208,
"learning_rate": 3.381330289708396e-05,
"loss": 0.0001,
"step": 834
},
{
"epoch": 0.6201262532491645,
"grad_norm": 0.0013340302975848317,
"learning_rate": 3.3698674174053844e-05,
"loss": 0.0001,
"step": 835
},
{
"epoch": 0.6208689194207204,
"grad_norm": 0.0081217335537076,
"learning_rate": 3.358414123932195e-05,
"loss": 0.0005,
"step": 836
},
{
"epoch": 0.6216115855922763,
"grad_norm": 0.059895727783441544,
"learning_rate": 3.346970476589583e-05,
"loss": 0.0021,
"step": 837
},
{
"epoch": 0.6223542517638322,
"grad_norm": 0.0007285134051926434,
"learning_rate": 3.3355365426216164e-05,
"loss": 0.0001,
"step": 838
},
{
"epoch": 0.623096917935388,
"grad_norm": 0.0223313607275486,
"learning_rate": 3.324112389215293e-05,
"loss": 0.0005,
"step": 839
},
{
"epoch": 0.623839584106944,
"grad_norm": 0.0015575829893350601,
"learning_rate": 3.31269808350013e-05,
"loss": 0.0001,
"step": 840
},
{
"epoch": 0.6245822502784998,
"grad_norm": 0.001839678268879652,
"learning_rate": 3.3012936925477857e-05,
"loss": 0.0001,
"step": 841
},
{
"epoch": 0.6253249164500557,
"grad_norm": 0.0008310491102747619,
"learning_rate": 3.289899283371657e-05,
"loss": 0.0001,
"step": 842
},
{
"epoch": 0.6260675826216116,
"grad_norm": 0.0006663409876637161,
"learning_rate": 3.278514922926484e-05,
"loss": 0.0001,
"step": 843
},
{
"epoch": 0.6268102487931675,
"grad_norm": 0.0015589938266202807,
"learning_rate": 3.267140678107964e-05,
"loss": 0.0001,
"step": 844
},
{
"epoch": 0.6275529149647233,
"grad_norm": 0.003392728278413415,
"learning_rate": 3.2557766157523465e-05,
"loss": 0.0001,
"step": 845
},
{
"epoch": 0.6282955811362793,
"grad_norm": 0.0010547033743932843,
"learning_rate": 3.244422802636057e-05,
"loss": 0.0001,
"step": 846
},
{
"epoch": 0.6290382473078351,
"grad_norm": 0.0024194575380533934,
"learning_rate": 3.2330793054752873e-05,
"loss": 0.0002,
"step": 847
},
{
"epoch": 0.629780913479391,
"grad_norm": 0.0006333817727863789,
"learning_rate": 3.2217461909256185e-05,
"loss": 0.0001,
"step": 848
},
{
"epoch": 0.6305235796509469,
"grad_norm": 0.0008936111116781831,
"learning_rate": 3.210423525581615e-05,
"loss": 0.0001,
"step": 849
},
{
"epoch": 0.6312662458225028,
"grad_norm": 0.0010497324401512742,
"learning_rate": 3.199111375976449e-05,
"loss": 0.0001,
"step": 850
},
{
"epoch": 0.6320089119940586,
"grad_norm": 0.010097804479300976,
"learning_rate": 3.1878098085814924e-05,
"loss": 0.0005,
"step": 851
},
{
"epoch": 0.6327515781656146,
"grad_norm": 0.003449138719588518,
"learning_rate": 3.176518889805941e-05,
"loss": 0.0001,
"step": 852
},
{
"epoch": 0.6334942443371704,
"grad_norm": 0.020226208493113518,
"learning_rate": 3.1652386859964156e-05,
"loss": 0.0004,
"step": 853
},
{
"epoch": 0.6342369105087263,
"grad_norm": 0.0008942090207710862,
"learning_rate": 3.153969263436579e-05,
"loss": 0.0001,
"step": 854
},
{
"epoch": 0.6349795766802823,
"grad_norm": 0.005887329112738371,
"learning_rate": 3.142710688346733e-05,
"loss": 0.0002,
"step": 855
},
{
"epoch": 0.6357222428518381,
"grad_norm": 0.0013140842784196138,
"learning_rate": 3.131463026883449e-05,
"loss": 0.0001,
"step": 856
},
{
"epoch": 0.636464909023394,
"grad_norm": 0.001480376347899437,
"learning_rate": 3.12022634513916e-05,
"loss": 0.0002,
"step": 857
},
{
"epoch": 0.6372075751949499,
"grad_norm": 0.0014736526645720005,
"learning_rate": 3.109000709141788e-05,
"loss": 0.0001,
"step": 858
},
{
"epoch": 0.6379502413665058,
"grad_norm": 0.0005073303473182023,
"learning_rate": 3.0977861848543444e-05,
"loss": 0.0001,
"step": 859
},
{
"epoch": 0.6386929075380616,
"grad_norm": 0.0009759328095242381,
"learning_rate": 3.086582838174551e-05,
"loss": 0.0001,
"step": 860
},
{
"epoch": 0.6394355737096176,
"grad_norm": 0.0012429956113919616,
"learning_rate": 3.0753907349344464e-05,
"loss": 0.0001,
"step": 861
},
{
"epoch": 0.6401782398811734,
"grad_norm": 0.003437102073803544,
"learning_rate": 3.064209940899998e-05,
"loss": 0.0002,
"step": 862
},
{
"epoch": 0.6409209060527293,
"grad_norm": 0.0044972775503993034,
"learning_rate": 3.0530405217707256e-05,
"loss": 0.0003,
"step": 863
},
{
"epoch": 0.6416635722242852,
"grad_norm": 0.0003766078152693808,
"learning_rate": 3.0418825431793052e-05,
"loss": 0.0001,
"step": 864
},
{
"epoch": 0.6424062383958411,
"grad_norm": 0.000993401394225657,
"learning_rate": 3.0307360706911896e-05,
"loss": 0.0001,
"step": 865
},
{
"epoch": 0.6431489045673969,
"grad_norm": 0.0007390630780719221,
"learning_rate": 3.019601169804216e-05,
"loss": 0.0001,
"step": 866
},
{
"epoch": 0.6438915707389529,
"grad_norm": 0.0011397154303267598,
"learning_rate": 3.0084779059482322e-05,
"loss": 0.0001,
"step": 867
},
{
"epoch": 0.6446342369105087,
"grad_norm": 0.004833963233977556,
"learning_rate": 2.997366344484698e-05,
"loss": 0.0002,
"step": 868
},
{
"epoch": 0.6453769030820646,
"grad_norm": 0.007242708466947079,
"learning_rate": 2.9862665507063147e-05,
"loss": 0.0002,
"step": 869
},
{
"epoch": 0.6461195692536205,
"grad_norm": 0.0021677876356989145,
"learning_rate": 2.975178589836632e-05,
"loss": 0.0001,
"step": 870
},
{
"epoch": 0.6468622354251764,
"grad_norm": 0.0013036590535193682,
"learning_rate": 2.9641025270296722e-05,
"loss": 0.0001,
"step": 871
},
{
"epoch": 0.6476049015967322,
"grad_norm": 0.0008346426184289157,
"learning_rate": 2.953038427369537e-05,
"loss": 0.0001,
"step": 872
},
{
"epoch": 0.6483475677682882,
"grad_norm": 0.0006587895331904292,
"learning_rate": 2.9419863558700396e-05,
"loss": 0.0001,
"step": 873
},
{
"epoch": 0.649090233939844,
"grad_norm": 0.0005403941031545401,
"learning_rate": 2.9309463774743046e-05,
"loss": 0.0001,
"step": 874
},
{
"epoch": 0.6498329001113999,
"grad_norm": 0.001244431477971375,
"learning_rate": 2.919918557054405e-05,
"loss": 0.0001,
"step": 875
},
{
"epoch": 0.6505755662829558,
"grad_norm": 0.004742435645312071,
"learning_rate": 2.9089029594109683e-05,
"loss": 0.0002,
"step": 876
},
{
"epoch": 0.6513182324545117,
"grad_norm": 0.0021011109929531813,
"learning_rate": 2.8978996492727994e-05,
"loss": 0.0002,
"step": 877
},
{
"epoch": 0.6520608986260675,
"grad_norm": 0.001175252953544259,
"learning_rate": 2.886908691296504e-05,
"loss": 0.0001,
"step": 878
},
{
"epoch": 0.6528035647976235,
"grad_norm": 0.0017059053061529994,
"learning_rate": 2.8759301500660972e-05,
"loss": 0.0001,
"step": 879
},
{
"epoch": 0.6535462309691794,
"grad_norm": 0.0007660607225261629,
"learning_rate": 2.864964090092644e-05,
"loss": 0.0001,
"step": 880
},
{
"epoch": 0.6542888971407352,
"grad_norm": 0.001221244689077139,
"learning_rate": 2.854010575813856e-05,
"loss": 0.0001,
"step": 881
},
{
"epoch": 0.6550315633122912,
"grad_norm": 0.0004929836140945554,
"learning_rate": 2.8430696715937337e-05,
"loss": 0.0001,
"step": 882
},
{
"epoch": 0.655774229483847,
"grad_norm": 0.0009183657239191234,
"learning_rate": 2.8321414417221746e-05,
"loss": 0.0001,
"step": 883
},
{
"epoch": 0.6565168956554029,
"grad_norm": 0.0003201389918103814,
"learning_rate": 2.8212259504146043e-05,
"loss": 0.0001,
"step": 884
},
{
"epoch": 0.6572595618269588,
"grad_norm": 0.0023057435173541307,
"learning_rate": 2.8103232618115903e-05,
"loss": 0.0001,
"step": 885
},
{
"epoch": 0.6580022279985147,
"grad_norm": 0.0004039146879222244,
"learning_rate": 2.7994334399784772e-05,
"loss": 0.0001,
"step": 886
},
{
"epoch": 0.6587448941700705,
"grad_norm": 0.001328380312770605,
"learning_rate": 2.7885565489049946e-05,
"loss": 0.0001,
"step": 887
},
{
"epoch": 0.6594875603416265,
"grad_norm": 0.0009456725674681365,
"learning_rate": 2.777692652504895e-05,
"loss": 0.0001,
"step": 888
},
{
"epoch": 0.6602302265131823,
"grad_norm": 0.0006427007610909641,
"learning_rate": 2.766841814615573e-05,
"loss": 0.0001,
"step": 889
},
{
"epoch": 0.6609728926847382,
"grad_norm": 0.0006173664005473256,
"learning_rate": 2.7560040989976892e-05,
"loss": 0.0001,
"step": 890
},
{
"epoch": 0.6617155588562941,
"grad_norm": 0.0005695072468370199,
"learning_rate": 2.7451795693347936e-05,
"loss": 0.0001,
"step": 891
},
{
"epoch": 0.66245822502785,
"grad_norm": 0.000993382534943521,
"learning_rate": 2.734368289232959e-05,
"loss": 0.0001,
"step": 892
},
{
"epoch": 0.6632008911994058,
"grad_norm": 0.00402169581502676,
"learning_rate": 2.723570322220399e-05,
"loss": 0.0001,
"step": 893
},
{
"epoch": 0.6639435573709618,
"grad_norm": 0.00042257923632860184,
"learning_rate": 2.7127857317470968e-05,
"loss": 0.0001,
"step": 894
},
{
"epoch": 0.6646862235425176,
"grad_norm": 0.0011959762778133154,
"learning_rate": 2.702014581184439e-05,
"loss": 0.0001,
"step": 895
},
{
"epoch": 0.6654288897140735,
"grad_norm": 0.0022542597725987434,
"learning_rate": 2.6912569338248315e-05,
"loss": 0.0001,
"step": 896
},
{
"epoch": 0.6661715558856294,
"grad_norm": 0.0031469352543354034,
"learning_rate": 2.6805128528813384e-05,
"loss": 0.0002,
"step": 897
},
{
"epoch": 0.6669142220571853,
"grad_norm": 0.0017010547453537583,
"learning_rate": 2.6697824014873075e-05,
"loss": 0.0002,
"step": 898
},
{
"epoch": 0.6676568882287411,
"grad_norm": 0.003644139738753438,
"learning_rate": 2.6590656426959902e-05,
"loss": 0.0002,
"step": 899
},
{
"epoch": 0.6683995544002971,
"grad_norm": 0.0016295432578772306,
"learning_rate": 2.6483626394801873e-05,
"loss": 0.0002,
"step": 900
},
{
"epoch": 0.6691422205718529,
"grad_norm": 0.012327441945672035,
"learning_rate": 2.637673454731868e-05,
"loss": 0.0003,
"step": 901
},
{
"epoch": 0.6698848867434088,
"grad_norm": 0.2909511923789978,
"learning_rate": 2.626998151261798e-05,
"loss": 0.004,
"step": 902
},
{
"epoch": 0.6706275529149647,
"grad_norm": 0.001305720885284245,
"learning_rate": 2.6163367917991822e-05,
"loss": 0.0001,
"step": 903
},
{
"epoch": 0.6713702190865206,
"grad_norm": 0.0014534946531057358,
"learning_rate": 2.605689438991282e-05,
"loss": 0.0001,
"step": 904
},
{
"epoch": 0.6721128852580764,
"grad_norm": 0.003072174033150077,
"learning_rate": 2.595056155403063e-05,
"loss": 0.0002,
"step": 905
},
{
"epoch": 0.6728555514296324,
"grad_norm": 0.004174454137682915,
"learning_rate": 2.5844370035168073e-05,
"loss": 0.0002,
"step": 906
},
{
"epoch": 0.6735982176011883,
"grad_norm": 0.001340761547908187,
"learning_rate": 2.5738320457317698e-05,
"loss": 0.0001,
"step": 907
},
{
"epoch": 0.6743408837727441,
"grad_norm": 0.5977171063423157,
"learning_rate": 2.5632413443637888e-05,
"loss": 0.0386,
"step": 908
},
{
"epoch": 0.6750835499443001,
"grad_norm": 0.000558204366825521,
"learning_rate": 2.552664961644936e-05,
"loss": 0.0001,
"step": 909
},
{
"epoch": 0.6758262161158559,
"grad_norm": 0.0011802364606410265,
"learning_rate": 2.5421029597231478e-05,
"loss": 0.0001,
"step": 910
},
{
"epoch": 0.6765688822874119,
"grad_norm": 0.000828778138384223,
"learning_rate": 2.5315554006618485e-05,
"loss": 0.0001,
"step": 911
},
{
"epoch": 0.6773115484589677,
"grad_norm": 0.0003953818522859365,
"learning_rate": 2.5210223464396053e-05,
"loss": 0.0001,
"step": 912
},
{
"epoch": 0.6780542146305236,
"grad_norm": 0.0005724510992877185,
"learning_rate": 2.5105038589497433e-05,
"loss": 0.0001,
"step": 913
},
{
"epoch": 0.6787968808020794,
"grad_norm": 0.0006651621079072356,
"learning_rate": 2.500000000000001e-05,
"loss": 0.0001,
"step": 914
},
{
"epoch": 0.6795395469736354,
"grad_norm": 0.0022599331568926573,
"learning_rate": 2.4895108313121508e-05,
"loss": 0.0002,
"step": 915
},
{
"epoch": 0.6802822131451912,
"grad_norm": 0.0014916729414835572,
"learning_rate": 2.4790364145216495e-05,
"loss": 0.0001,
"step": 916
},
{
"epoch": 0.6810248793167472,
"grad_norm": 0.004133202601224184,
"learning_rate": 2.4685768111772644e-05,
"loss": 0.0002,
"step": 917
},
{
"epoch": 0.681767545488303,
"grad_norm": 0.06581906974315643,
"learning_rate": 2.458132082740724e-05,
"loss": 0.0024,
"step": 918
},
{
"epoch": 0.6825102116598589,
"grad_norm": 0.0005587110063061118,
"learning_rate": 2.4477022905863427e-05,
"loss": 0.0001,
"step": 919
},
{
"epoch": 0.6832528778314148,
"grad_norm": 0.0006018027779646218,
"learning_rate": 2.4372874960006743e-05,
"loss": 0.0001,
"step": 920
},
{
"epoch": 0.6839955440029707,
"grad_norm": 0.0009498025756329298,
"learning_rate": 2.4268877601821423e-05,
"loss": 0.0001,
"step": 921
},
{
"epoch": 0.6847382101745265,
"grad_norm": 0.0018847642932087183,
"learning_rate": 2.4165031442406855e-05,
"loss": 0.0001,
"step": 922
},
{
"epoch": 0.6854808763460825,
"grad_norm": 0.0015400544507429004,
"learning_rate": 2.4061337091973918e-05,
"loss": 0.0001,
"step": 923
},
{
"epoch": 0.6862235425176383,
"grad_norm": 0.0018971724202856421,
"learning_rate": 2.3957795159841517e-05,
"loss": 0.0001,
"step": 924
},
{
"epoch": 0.6869662086891942,
"grad_norm": 0.004078839905560017,
"learning_rate": 2.385440625443287e-05,
"loss": 0.0002,
"step": 925
},
{
"epoch": 0.68770887486075,
"grad_norm": 0.0012903795577585697,
"learning_rate": 2.3751170983272e-05,
"loss": 0.0002,
"step": 926
},
{
"epoch": 0.688451541032306,
"grad_norm": 0.0016058724140748382,
"learning_rate": 2.3648089952980202e-05,
"loss": 0.0001,
"step": 927
},
{
"epoch": 0.6891942072038618,
"grad_norm": 0.002747948979958892,
"learning_rate": 2.3545163769272383e-05,
"loss": 0.0001,
"step": 928
},
{
"epoch": 0.6899368733754178,
"grad_norm": 0.0010071592405438423,
"learning_rate": 2.3442393036953613e-05,
"loss": 0.0001,
"step": 929
},
{
"epoch": 0.6906795395469736,
"grad_norm": 0.003560745855793357,
"learning_rate": 2.333977835991545e-05,
"loss": 0.0001,
"step": 930
},
{
"epoch": 0.6914222057185295,
"grad_norm": 0.0034868186339735985,
"learning_rate": 2.3237320341132497e-05,
"loss": 0.0002,
"step": 931
},
{
"epoch": 0.6921648718900854,
"grad_norm": 0.0018978634616360068,
"learning_rate": 2.3135019582658802e-05,
"loss": 0.0002,
"step": 932
},
{
"epoch": 0.6929075380616413,
"grad_norm": 0.000817135616671294,
"learning_rate": 2.3032876685624367e-05,
"loss": 0.0001,
"step": 933
},
{
"epoch": 0.6936502042331972,
"grad_norm": 0.002365557011216879,
"learning_rate": 2.2930892250231516e-05,
"loss": 0.0001,
"step": 934
},
{
"epoch": 0.6943928704047531,
"grad_norm": 0.0016304274322465062,
"learning_rate": 2.282906687575151e-05,
"loss": 0.0001,
"step": 935
},
{
"epoch": 0.695135536576309,
"grad_norm": 0.004060409963130951,
"learning_rate": 2.2727401160520876e-05,
"loss": 0.0002,
"step": 936
},
{
"epoch": 0.6958782027478648,
"grad_norm": 0.0014790298882871866,
"learning_rate": 2.262589570193805e-05,
"loss": 0.0001,
"step": 937
},
{
"epoch": 0.6966208689194208,
"grad_norm": 0.0010480373166501522,
"learning_rate": 2.25245510964597e-05,
"loss": 0.0001,
"step": 938
},
{
"epoch": 0.6973635350909766,
"grad_norm": 0.0022476809099316597,
"learning_rate": 2.2423367939597385e-05,
"loss": 0.0002,
"step": 939
},
{
"epoch": 0.6981062012625325,
"grad_norm": 0.0014067289885133505,
"learning_rate": 2.2322346825913886e-05,
"loss": 0.0001,
"step": 940
},
{
"epoch": 0.6988488674340884,
"grad_norm": 0.002156575210392475,
"learning_rate": 2.2221488349019903e-05,
"loss": 0.0001,
"step": 941
},
{
"epoch": 0.6995915336056443,
"grad_norm": 0.0006094371201470494,
"learning_rate": 2.2120793101570364e-05,
"loss": 0.0001,
"step": 942
},
{
"epoch": 0.7003341997772001,
"grad_norm": 0.0005834063631482422,
"learning_rate": 2.202026167526111e-05,
"loss": 0.0001,
"step": 943
},
{
"epoch": 0.7010768659487561,
"grad_norm": 0.00110190873965621,
"learning_rate": 2.1919894660825365e-05,
"loss": 0.0001,
"step": 944
},
{
"epoch": 0.7018195321203119,
"grad_norm": 0.06204232573509216,
"learning_rate": 2.1819692648030165e-05,
"loss": 0.0018,
"step": 945
},
{
"epoch": 0.7025621982918678,
"grad_norm": 0.002225680975243449,
"learning_rate": 2.171965622567308e-05,
"loss": 0.0001,
"step": 946
},
{
"epoch": 0.7033048644634237,
"grad_norm": 0.005498223472386599,
"learning_rate": 2.1619785981578573e-05,
"loss": 0.0002,
"step": 947
},
{
"epoch": 0.7040475306349796,
"grad_norm": 0.2603996992111206,
"learning_rate": 2.1520082502594685e-05,
"loss": 0.0036,
"step": 948
},
{
"epoch": 0.7047901968065354,
"grad_norm": 0.0005693411221727729,
"learning_rate": 2.1420546374589484e-05,
"loss": 0.0001,
"step": 949
},
{
"epoch": 0.7055328629780914,
"grad_norm": 0.0005677467561326921,
"learning_rate": 2.132117818244771e-05,
"loss": 0.0001,
"step": 950
},
{
"epoch": 0.7062755291496472,
"grad_norm": 0.019135845825076103,
"learning_rate": 2.1221978510067236e-05,
"loss": 0.0006,
"step": 951
},
{
"epoch": 0.7070181953212031,
"grad_norm": 0.3902997672557831,
"learning_rate": 2.1122947940355747e-05,
"loss": 0.0052,
"step": 952
},
{
"epoch": 0.707760861492759,
"grad_norm": 0.0012116495054215193,
"learning_rate": 2.1024087055227248e-05,
"loss": 0.0001,
"step": 953
},
{
"epoch": 0.7085035276643149,
"grad_norm": 0.3175068795681,
"learning_rate": 2.0925396435598664e-05,
"loss": 0.0045,
"step": 954
},
{
"epoch": 0.7092461938358707,
"grad_norm": 0.015600265935063362,
"learning_rate": 2.0826876661386395e-05,
"loss": 0.0002,
"step": 955
},
{
"epoch": 0.7099888600074267,
"grad_norm": 0.0010375302517786622,
"learning_rate": 2.0728528311502976e-05,
"loss": 0.0001,
"step": 956
},
{
"epoch": 0.7107315261789825,
"grad_norm": 0.0014974784571677446,
"learning_rate": 2.0630351963853587e-05,
"loss": 0.0001,
"step": 957
},
{
"epoch": 0.7114741923505384,
"grad_norm": 0.0019292763900011778,
"learning_rate": 2.053234819533276e-05,
"loss": 0.0001,
"step": 958
},
{
"epoch": 0.7122168585220943,
"grad_norm": 0.0008647122303955257,
"learning_rate": 2.0434517581820896e-05,
"loss": 0.0001,
"step": 959
},
{
"epoch": 0.7129595246936502,
"grad_norm": 0.0007554054609499872,
"learning_rate": 2.0336860698180905e-05,
"loss": 0.0001,
"step": 960
},
{
"epoch": 0.7137021908652061,
"grad_norm": 0.01203538291156292,
"learning_rate": 2.0239378118254905e-05,
"loss": 0.0004,
"step": 961
},
{
"epoch": 0.714444857036762,
"grad_norm": 0.005261108744889498,
"learning_rate": 2.0142070414860704e-05,
"loss": 0.0003,
"step": 962
},
{
"epoch": 0.7151875232083179,
"grad_norm": 0.006285266485065222,
"learning_rate": 2.0044938159788584e-05,
"loss": 0.0002,
"step": 963
},
{
"epoch": 0.7159301893798737,
"grad_norm": 0.0008547162869945168,
"learning_rate": 1.9947981923797853e-05,
"loss": 0.0001,
"step": 964
},
{
"epoch": 0.7166728555514297,
"grad_norm": 0.00040477412403561175,
"learning_rate": 1.9851202276613523e-05,
"loss": 0.0001,
"step": 965
},
{
"epoch": 0.7174155217229855,
"grad_norm": 0.0028770139906555414,
"learning_rate": 1.975459978692291e-05,
"loss": 0.0003,
"step": 966
},
{
"epoch": 0.7181581878945414,
"grad_norm": 0.001716628554277122,
"learning_rate": 1.96581750223724e-05,
"loss": 0.0002,
"step": 967
},
{
"epoch": 0.7189008540660973,
"grad_norm": 0.0005640503368340433,
"learning_rate": 1.9561928549563968e-05,
"loss": 0.0001,
"step": 968
},
{
"epoch": 0.7196435202376532,
"grad_norm": 0.001024580909870565,
"learning_rate": 1.9465860934052006e-05,
"loss": 0.0001,
"step": 969
},
{
"epoch": 0.720386186409209,
"grad_norm": 0.0007752843666821718,
"learning_rate": 1.936997274033986e-05,
"loss": 0.0001,
"step": 970
},
{
"epoch": 0.721128852580765,
"grad_norm": 0.0008537242538295686,
"learning_rate": 1.927426453187663e-05,
"loss": 0.0001,
"step": 971
},
{
"epoch": 0.7218715187523208,
"grad_norm": 0.0009459343855269253,
"learning_rate": 1.9178736871053736e-05,
"loss": 0.0001,
"step": 972
},
{
"epoch": 0.7226141849238767,
"grad_norm": 0.0008696299046278,
"learning_rate": 1.9083390319201764e-05,
"loss": 0.0001,
"step": 973
},
{
"epoch": 0.7233568510954326,
"grad_norm": 0.0020470714662224054,
"learning_rate": 1.8988225436587002e-05,
"loss": 0.0002,
"step": 974
},
{
"epoch": 0.7240995172669885,
"grad_norm": 0.0032244205940514803,
"learning_rate": 1.889324278240829e-05,
"loss": 0.0002,
"step": 975
},
{
"epoch": 0.7248421834385443,
"grad_norm": 0.007482845336198807,
"learning_rate": 1.8798442914793663e-05,
"loss": 0.0002,
"step": 976
},
{
"epoch": 0.7255848496101003,
"grad_norm": 0.25979605317115784,
"learning_rate": 1.8703826390797048e-05,
"loss": 0.0077,
"step": 977
},
{
"epoch": 0.7263275157816561,
"grad_norm": 0.0036151015665382147,
"learning_rate": 1.8609393766395085e-05,
"loss": 0.0002,
"step": 978
},
{
"epoch": 0.727070181953212,
"grad_norm": 1.0644394159317017,
"learning_rate": 1.8515145596483723e-05,
"loss": 0.0138,
"step": 979
},
{
"epoch": 0.7278128481247679,
"grad_norm": 0.0007425799267366529,
"learning_rate": 1.842108243487513e-05,
"loss": 0.0001,
"step": 980
},
{
"epoch": 0.7285555142963238,
"grad_norm": 0.0005094231455586851,
"learning_rate": 1.832720483429426e-05,
"loss": 0.0001,
"step": 981
},
{
"epoch": 0.7292981804678796,
"grad_norm": 0.0007252280483953655,
"learning_rate": 1.8233513346375756e-05,
"loss": 0.0001,
"step": 982
},
{
"epoch": 0.7300408466394356,
"grad_norm": 0.0005672296392731369,
"learning_rate": 1.814000852166059e-05,
"loss": 0.0001,
"step": 983
},
{
"epoch": 0.7307835128109914,
"grad_norm": 0.9842846989631653,
"learning_rate": 1.8046690909592917e-05,
"loss": 0.1766,
"step": 984
},
{
"epoch": 0.7315261789825473,
"grad_norm": 0.000559376843739301,
"learning_rate": 1.7953561058516828e-05,
"loss": 0.0001,
"step": 985
},
{
"epoch": 0.7322688451541032,
"grad_norm": 0.0007592244073748589,
"learning_rate": 1.7860619515673033e-05,
"loss": 0.0001,
"step": 986
},
{
"epoch": 0.7330115113256591,
"grad_norm": 0.0009743353584781289,
"learning_rate": 1.7767866827195805e-05,
"loss": 0.0001,
"step": 987
},
{
"epoch": 0.733754177497215,
"grad_norm": 0.0004740802978631109,
"learning_rate": 1.767530353810968e-05,
"loss": 0.0001,
"step": 988
},
{
"epoch": 0.7344968436687709,
"grad_norm": 0.001709680538624525,
"learning_rate": 1.7582930192326186e-05,
"loss": 0.0001,
"step": 989
},
{
"epoch": 0.7352395098403268,
"grad_norm": 0.004645473323762417,
"learning_rate": 1.7490747332640833e-05,
"loss": 0.0002,
"step": 990
},
{
"epoch": 0.7359821760118826,
"grad_norm": 0.002532870974391699,
"learning_rate": 1.7398755500729734e-05,
"loss": 0.0001,
"step": 991
},
{
"epoch": 0.7367248421834386,
"grad_norm": 0.0024906517937779427,
"learning_rate": 1.730695523714652e-05,
"loss": 0.0001,
"step": 992
},
{
"epoch": 0.7374675083549944,
"grad_norm": 0.006111454218626022,
"learning_rate": 1.7215347081319188e-05,
"loss": 0.0003,
"step": 993
},
{
"epoch": 0.7382101745265504,
"grad_norm": 0.004187372513115406,
"learning_rate": 1.7123931571546827e-05,
"loss": 0.0002,
"step": 994
},
{
"epoch": 0.7389528406981062,
"grad_norm": 0.006407550070434809,
"learning_rate": 1.703270924499656e-05,
"loss": 0.0004,
"step": 995
},
{
"epoch": 0.7396955068696621,
"grad_norm": 0.01187540590763092,
"learning_rate": 1.6941680637700334e-05,
"loss": 0.0005,
"step": 996
},
{
"epoch": 0.740438173041218,
"grad_norm": 0.008408035151660442,
"learning_rate": 1.68508462845518e-05,
"loss": 0.0003,
"step": 997
},
{
"epoch": 0.7411808392127739,
"grad_norm": 0.00872902199625969,
"learning_rate": 1.6760206719303105e-05,
"loss": 0.0003,
"step": 998
},
{
"epoch": 0.7419235053843297,
"grad_norm": 0.013007326982915401,
"learning_rate": 1.6669762474561857e-05,
"loss": 0.0005,
"step": 999
},
{
"epoch": 0.7426661715558857,
"grad_norm": 0.044673044234514236,
"learning_rate": 1.657951408178788e-05,
"loss": 0.0013,
"step": 1000
},
{
"epoch": 0.7434088377274415,
"grad_norm": 0.017271386459469795,
"learning_rate": 1.6489462071290214e-05,
"loss": 0.0006,
"step": 1001
},
{
"epoch": 0.7441515038989974,
"grad_norm": 0.03663730248808861,
"learning_rate": 1.639960697222388e-05,
"loss": 0.001,
"step": 1002
},
{
"epoch": 0.7448941700705533,
"grad_norm": 0.01725134626030922,
"learning_rate": 1.630994931258688e-05,
"loss": 0.0005,
"step": 1003
},
{
"epoch": 0.7456368362421092,
"grad_norm": 0.0050296480767428875,
"learning_rate": 1.622048961921699e-05,
"loss": 0.0003,
"step": 1004
},
{
"epoch": 0.746379502413665,
"grad_norm": 0.013799471780657768,
"learning_rate": 1.6131228417788765e-05,
"loss": 0.0004,
"step": 1005
},
{
"epoch": 0.747122168585221,
"grad_norm": 0.005248996429145336,
"learning_rate": 1.6042166232810347e-05,
"loss": 0.0003,
"step": 1006
},
{
"epoch": 0.7478648347567768,
"grad_norm": 0.0058412919752299786,
"learning_rate": 1.595330358762047e-05,
"loss": 0.0005,
"step": 1007
},
{
"epoch": 0.7486075009283327,
"grad_norm": 0.006610256619751453,
"learning_rate": 1.5864641004385384e-05,
"loss": 0.0003,
"step": 1008
},
{
"epoch": 0.7493501670998886,
"grad_norm": 0.004714768845587969,
"learning_rate": 1.577617900409566e-05,
"loss": 0.0002,
"step": 1009
},
{
"epoch": 0.7500928332714445,
"grad_norm": 0.005967407487332821,
"learning_rate": 1.5687918106563326e-05,
"loss": 0.0003,
"step": 1010
},
{
"epoch": 0.7508354994430003,
"grad_norm": 0.004380329977720976,
"learning_rate": 1.5599858830418628e-05,
"loss": 0.0003,
"step": 1011
},
{
"epoch": 0.7508354994430003,
"eval_loss": 9.535554272588342e-05,
"eval_runtime": 187.7576,
"eval_samples_per_second": 6.04,
"eval_steps_per_second": 3.02,
"step": 1011
},
{
"epoch": 0.7515781656145563,
"grad_norm": 0.010244094766676426,
"learning_rate": 1.5512001693107124e-05,
"loss": 0.0003,
"step": 1012
},
{
"epoch": 0.7523208317861121,
"grad_norm": 0.003399641951546073,
"learning_rate": 1.5424347210886538e-05,
"loss": 0.0002,
"step": 1013
},
{
"epoch": 0.753063497957668,
"grad_norm": 0.004665118642151356,
"learning_rate": 1.53368958988238e-05,
"loss": 0.0003,
"step": 1014
},
{
"epoch": 0.753806164129224,
"grad_norm": 0.007897808216512203,
"learning_rate": 1.5249648270791966e-05,
"loss": 0.0004,
"step": 1015
},
{
"epoch": 0.7545488303007798,
"grad_norm": 0.005702964495867491,
"learning_rate": 1.5162604839467265e-05,
"loss": 0.0003,
"step": 1016
},
{
"epoch": 0.7552914964723357,
"grad_norm": 0.1535305380821228,
"learning_rate": 1.5075766116325985e-05,
"loss": 0.0013,
"step": 1017
},
{
"epoch": 0.7560341626438916,
"grad_norm": 0.0065938313491642475,
"learning_rate": 1.4989132611641576e-05,
"loss": 0.0003,
"step": 1018
},
{
"epoch": 0.7567768288154475,
"grad_norm": 0.011125356890261173,
"learning_rate": 1.4902704834481584e-05,
"loss": 0.0004,
"step": 1019
},
{
"epoch": 0.7575194949870033,
"grad_norm": 0.010162554681301117,
"learning_rate": 1.4816483292704708e-05,
"loss": 0.0004,
"step": 1020
},
{
"epoch": 0.7582621611585593,
"grad_norm": 0.012406081892549992,
"learning_rate": 1.4730468492957722e-05,
"loss": 0.0004,
"step": 1021
},
{
"epoch": 0.7590048273301151,
"grad_norm": 0.010129008442163467,
"learning_rate": 1.4644660940672627e-05,
"loss": 0.0004,
"step": 1022
},
{
"epoch": 0.759747493501671,
"grad_norm": 0.018571769818663597,
"learning_rate": 1.4559061140063578e-05,
"loss": 0.0006,
"step": 1023
},
{
"epoch": 0.7604901596732269,
"grad_norm": 0.01117369718849659,
"learning_rate": 1.4473669594123956e-05,
"loss": 0.0004,
"step": 1024
},
{
"epoch": 0.7612328258447828,
"grad_norm": 0.011828822083771229,
"learning_rate": 1.4388486804623464e-05,
"loss": 0.0005,
"step": 1025
},
{
"epoch": 0.7619754920163386,
"grad_norm": 0.006083609536290169,
"learning_rate": 1.4303513272105057e-05,
"loss": 0.0003,
"step": 1026
},
{
"epoch": 0.7627181581878946,
"grad_norm": 0.0038173352368175983,
"learning_rate": 1.4218749495882133e-05,
"loss": 0.0002,
"step": 1027
},
{
"epoch": 0.7634608243594504,
"grad_norm": 0.003378799417987466,
"learning_rate": 1.4134195974035525e-05,
"loss": 0.0002,
"step": 1028
},
{
"epoch": 0.7642034905310063,
"grad_norm": 0.0023517953231930733,
"learning_rate": 1.4049853203410596e-05,
"loss": 0.0002,
"step": 1029
},
{
"epoch": 0.7649461567025622,
"grad_norm": 0.005170909222215414,
"learning_rate": 1.396572167961427e-05,
"loss": 0.0002,
"step": 1030
},
{
"epoch": 0.7656888228741181,
"grad_norm": 0.003187798196449876,
"learning_rate": 1.3881801897012225e-05,
"loss": 0.0002,
"step": 1031
},
{
"epoch": 0.7664314890456739,
"grad_norm": 0.0076558575965464115,
"learning_rate": 1.3798094348725871e-05,
"loss": 0.0002,
"step": 1032
},
{
"epoch": 0.7671741552172299,
"grad_norm": 0.002008821815252304,
"learning_rate": 1.3714599526629551e-05,
"loss": 0.0002,
"step": 1033
},
{
"epoch": 0.7679168213887857,
"grad_norm": 0.011446031741797924,
"learning_rate": 1.3631317921347563e-05,
"loss": 0.0003,
"step": 1034
},
{
"epoch": 0.7686594875603416,
"grad_norm": 0.0025135111063718796,
"learning_rate": 1.3548250022251375e-05,
"loss": 0.0002,
"step": 1035
},
{
"epoch": 0.7694021537318975,
"grad_norm": 0.0011204791953787208,
"learning_rate": 1.346539631745663e-05,
"loss": 0.0001,
"step": 1036
},
{
"epoch": 0.7701448199034534,
"grad_norm": 0.002433129120618105,
"learning_rate": 1.3382757293820409e-05,
"loss": 0.0002,
"step": 1037
},
{
"epoch": 0.7708874860750092,
"grad_norm": 0.004581818822771311,
"learning_rate": 1.330033343693824e-05,
"loss": 0.0003,
"step": 1038
},
{
"epoch": 0.7716301522465652,
"grad_norm": 0.001932491664774716,
"learning_rate": 1.3218125231141354e-05,
"loss": 0.0002,
"step": 1039
},
{
"epoch": 0.7723728184181211,
"grad_norm": 0.001757967984303832,
"learning_rate": 1.3136133159493802e-05,
"loss": 0.0002,
"step": 1040
},
{
"epoch": 0.7731154845896769,
"grad_norm": 0.0024122344329953194,
"learning_rate": 1.3054357703789554e-05,
"loss": 0.0001,
"step": 1041
},
{
"epoch": 0.7738581507612329,
"grad_norm": 0.003989276476204395,
"learning_rate": 1.297279934454978e-05,
"loss": 0.0003,
"step": 1042
},
{
"epoch": 0.7746008169327887,
"grad_norm": 0.10799296200275421,
"learning_rate": 1.2891458561019915e-05,
"loss": 0.0017,
"step": 1043
},
{
"epoch": 0.7753434831043446,
"grad_norm": 0.005773243959993124,
"learning_rate": 1.2810335831166954e-05,
"loss": 0.0002,
"step": 1044
},
{
"epoch": 0.7760861492759005,
"grad_norm": 0.003262764774262905,
"learning_rate": 1.2729431631676525e-05,
"loss": 0.0002,
"step": 1045
},
{
"epoch": 0.7768288154474564,
"grad_norm": 0.0027453494258224964,
"learning_rate": 1.264874643795021e-05,
"loss": 0.0002,
"step": 1046
},
{
"epoch": 0.7775714816190122,
"grad_norm": 0.030763821676373482,
"learning_rate": 1.2568280724102632e-05,
"loss": 0.001,
"step": 1047
},
{
"epoch": 0.7783141477905682,
"grad_norm": 0.004409801680594683,
"learning_rate": 1.2488034962958789e-05,
"loss": 0.0002,
"step": 1048
},
{
"epoch": 0.779056813962124,
"grad_norm": 0.004938923753798008,
"learning_rate": 1.2408009626051137e-05,
"loss": 0.0003,
"step": 1049
},
{
"epoch": 0.77979948013368,
"grad_norm": 0.007012166548520327,
"learning_rate": 1.2328205183616965e-05,
"loss": 0.0003,
"step": 1050
},
{
"epoch": 0.7805421463052358,
"grad_norm": 0.004935544449836016,
"learning_rate": 1.2248622104595532e-05,
"loss": 0.0002,
"step": 1051
},
{
"epoch": 0.7812848124767917,
"grad_norm": 0.002522197552025318,
"learning_rate": 1.216926085662536e-05,
"loss": 0.0002,
"step": 1052
},
{
"epoch": 0.7820274786483475,
"grad_norm": 0.001708640600554645,
"learning_rate": 1.209012190604143e-05,
"loss": 0.0002,
"step": 1053
},
{
"epoch": 0.7827701448199035,
"grad_norm": 0.001268695225007832,
"learning_rate": 1.2011205717872537e-05,
"loss": 0.0001,
"step": 1054
},
{
"epoch": 0.7835128109914593,
"grad_norm": 0.0013489356497302651,
"learning_rate": 1.1932512755838449e-05,
"loss": 0.0001,
"step": 1055
},
{
"epoch": 0.7842554771630152,
"grad_norm": 0.002102524507790804,
"learning_rate": 1.1854043482347248e-05,
"loss": 0.0001,
"step": 1056
},
{
"epoch": 0.7849981433345711,
"grad_norm": 0.0030939369462430477,
"learning_rate": 1.1775798358492629e-05,
"loss": 0.0002,
"step": 1057
},
{
"epoch": 0.785740809506127,
"grad_norm": 0.001534225302748382,
"learning_rate": 1.1697777844051105e-05,
"loss": 0.0001,
"step": 1058
},
{
"epoch": 0.7864834756776828,
"grad_norm": 0.008333349600434303,
"learning_rate": 1.1619982397479412e-05,
"loss": 0.0002,
"step": 1059
},
{
"epoch": 0.7872261418492388,
"grad_norm": 0.2474621683359146,
"learning_rate": 1.1542412475911758e-05,
"loss": 0.0036,
"step": 1060
},
{
"epoch": 0.7879688080207946,
"grad_norm": 0.0020573630463331938,
"learning_rate": 1.1465068535157097e-05,
"loss": 0.0002,
"step": 1061
},
{
"epoch": 0.7887114741923505,
"grad_norm": 0.009289558976888657,
"learning_rate": 1.1387951029696542e-05,
"loss": 0.0003,
"step": 1062
},
{
"epoch": 0.7894541403639064,
"grad_norm": 0.0011494681239128113,
"learning_rate": 1.1311060412680647e-05,
"loss": 0.0001,
"step": 1063
},
{
"epoch": 0.7901968065354623,
"grad_norm": 0.002744733588770032,
"learning_rate": 1.1234397135926705e-05,
"loss": 0.0002,
"step": 1064
},
{
"epoch": 0.7909394727070181,
"grad_norm": 0.0026154378429055214,
"learning_rate": 1.1157961649916194e-05,
"loss": 0.0001,
"step": 1065
},
{
"epoch": 0.7916821388785741,
"grad_norm": 0.002585667185485363,
"learning_rate": 1.1081754403791999e-05,
"loss": 0.0002,
"step": 1066
},
{
"epoch": 0.79242480505013,
"grad_norm": 0.0019874395802617073,
"learning_rate": 1.100577584535592e-05,
"loss": 0.0002,
"step": 1067
},
{
"epoch": 0.7931674712216858,
"grad_norm": 0.0020060345996171236,
"learning_rate": 1.0930026421065887e-05,
"loss": 0.0002,
"step": 1068
},
{
"epoch": 0.7939101373932418,
"grad_norm": 0.0012965823989361525,
"learning_rate": 1.0854506576033491e-05,
"loss": 0.0001,
"step": 1069
},
{
"epoch": 0.7946528035647976,
"grad_norm": 0.0032621456775814295,
"learning_rate": 1.0779216754021215e-05,
"loss": 0.0002,
"step": 1070
},
{
"epoch": 0.7953954697363536,
"grad_norm": 0.002112373011186719,
"learning_rate": 1.0704157397439962e-05,
"loss": 0.0001,
"step": 1071
},
{
"epoch": 0.7961381359079094,
"grad_norm": 0.00291498564183712,
"learning_rate": 1.062932894734639e-05,
"loss": 0.0002,
"step": 1072
},
{
"epoch": 0.7968808020794653,
"grad_norm": 0.005537567660212517,
"learning_rate": 1.0554731843440275e-05,
"loss": 0.0002,
"step": 1073
},
{
"epoch": 0.7976234682510212,
"grad_norm": 0.002763421507552266,
"learning_rate": 1.0480366524062042e-05,
"loss": 0.0002,
"step": 1074
},
{
"epoch": 0.7983661344225771,
"grad_norm": 0.007374095730483532,
"learning_rate": 1.0406233426190076e-05,
"loss": 0.0003,
"step": 1075
},
{
"epoch": 0.7991088005941329,
"grad_norm": 0.009125984273850918,
"learning_rate": 1.0332332985438248e-05,
"loss": 0.0002,
"step": 1076
},
{
"epoch": 0.7998514667656889,
"grad_norm": 0.004513624124228954,
"learning_rate": 1.0258665636053272e-05,
"loss": 0.0003,
"step": 1077
},
{
"epoch": 0.8005941329372447,
"grad_norm": 0.002031454350799322,
"learning_rate": 1.0185231810912222e-05,
"loss": 0.0002,
"step": 1078
},
{
"epoch": 0.8013367991088006,
"grad_norm": 0.0018436467507854104,
"learning_rate": 1.0112031941519934e-05,
"loss": 0.0002,
"step": 1079
},
{
"epoch": 0.8020794652803565,
"grad_norm": 0.02794628217816353,
"learning_rate": 1.003906645800653e-05,
"loss": 0.0003,
"step": 1080
},
{
"epoch": 0.8028221314519124,
"grad_norm": 0.0030993130058050156,
"learning_rate": 9.966335789124809e-06,
"loss": 0.0002,
"step": 1081
},
{
"epoch": 0.8035647976234682,
"grad_norm": 0.01633264310657978,
"learning_rate": 9.893840362247809e-06,
"loss": 0.0004,
"step": 1082
},
{
"epoch": 0.8043074637950242,
"grad_norm": 0.002037476282566786,
"learning_rate": 9.821580603366259e-06,
"loss": 0.0001,
"step": 1083
},
{
"epoch": 0.80505012996658,
"grad_norm": 0.0021626856178045273,
"learning_rate": 9.749556937086085e-06,
"loss": 0.0001,
"step": 1084
},
{
"epoch": 0.8057927961381359,
"grad_norm": 0.0009408566984347999,
"learning_rate": 9.677769786625867e-06,
"loss": 0.0001,
"step": 1085
},
{
"epoch": 0.8065354623096918,
"grad_norm": 0.0013037261087447405,
"learning_rate": 9.606219573814445e-06,
"loss": 0.0001,
"step": 1086
},
{
"epoch": 0.8072781284812477,
"grad_norm": 0.0014423932880163193,
"learning_rate": 9.534906719088355e-06,
"loss": 0.0002,
"step": 1087
},
{
"epoch": 0.8080207946528035,
"grad_norm": 0.0028046793304383755,
"learning_rate": 9.46383164148939e-06,
"loss": 0.0002,
"step": 1088
},
{
"epoch": 0.8087634608243595,
"grad_norm": 0.0011104003060609102,
"learning_rate": 9.392994758662178e-06,
"loss": 0.0001,
"step": 1089
},
{
"epoch": 0.8095061269959153,
"grad_norm": 0.0008305896772071719,
"learning_rate": 9.322396486851626e-06,
"loss": 0.0001,
"step": 1090
},
{
"epoch": 0.8102487931674712,
"grad_norm": 0.049922533333301544,
"learning_rate": 9.252037240900619e-06,
"loss": 0.0017,
"step": 1091
},
{
"epoch": 0.810991459339027,
"grad_norm": 0.0017735987203195691,
"learning_rate": 9.181917434247416e-06,
"loss": 0.0002,
"step": 1092
},
{
"epoch": 0.811734125510583,
"grad_norm": 0.0012938451254740357,
"learning_rate": 9.112037478923363e-06,
"loss": 0.0002,
"step": 1093
},
{
"epoch": 0.8124767916821389,
"grad_norm": 0.0025100386701524258,
"learning_rate": 9.042397785550405e-06,
"loss": 0.0002,
"step": 1094
},
{
"epoch": 0.8132194578536948,
"grad_norm": 2.1535964012145996,
"learning_rate": 8.972998763338685e-06,
"loss": 0.291,
"step": 1095
},
{
"epoch": 0.8139621240252507,
"grad_norm": 0.02024056576192379,
"learning_rate": 8.903840820084096e-06,
"loss": 0.0004,
"step": 1096
},
{
"epoch": 0.8147047901968065,
"grad_norm": 0.002208460820838809,
"learning_rate": 8.834924362165992e-06,
"loss": 0.0002,
"step": 1097
},
{
"epoch": 0.8154474563683625,
"grad_norm": 0.0023833250161260366,
"learning_rate": 8.766249794544662e-06,
"loss": 0.0002,
"step": 1098
},
{
"epoch": 0.8161901225399183,
"grad_norm": 0.003369487589225173,
"learning_rate": 8.697817520759093e-06,
"loss": 0.0002,
"step": 1099
},
{
"epoch": 0.8169327887114742,
"grad_norm": 0.003595678135752678,
"learning_rate": 8.629627942924473e-06,
"loss": 0.0002,
"step": 1100
},
{
"epoch": 0.8176754548830301,
"grad_norm": 0.011070462875068188,
"learning_rate": 8.561681461729926e-06,
"loss": 0.0003,
"step": 1101
},
{
"epoch": 0.818418121054586,
"grad_norm": 0.000978004070930183,
"learning_rate": 8.49397847643606e-06,
"loss": 0.0001,
"step": 1102
},
{
"epoch": 0.8191607872261418,
"grad_norm": 0.0015130506362766027,
"learning_rate": 8.426519384872733e-06,
"loss": 0.0001,
"step": 1103
},
{
"epoch": 0.8199034533976978,
"grad_norm": 0.0014117389218881726,
"learning_rate": 8.359304583436644e-06,
"loss": 0.0002,
"step": 1104
},
{
"epoch": 0.8206461195692536,
"grad_norm": 0.0007138338405638933,
"learning_rate": 8.29233446708898e-06,
"loss": 0.0001,
"step": 1105
},
{
"epoch": 0.8213887857408095,
"grad_norm": 0.0014467401197180152,
"learning_rate": 8.225609429353187e-06,
"loss": 0.0001,
"step": 1106
},
{
"epoch": 0.8221314519123654,
"grad_norm": 0.0005772042204625905,
"learning_rate": 8.159129862312548e-06,
"loss": 0.0001,
"step": 1107
},
{
"epoch": 0.8228741180839213,
"grad_norm": 0.0007005248917266726,
"learning_rate": 8.092896156607987e-06,
"loss": 0.0001,
"step": 1108
},
{
"epoch": 0.8236167842554771,
"grad_norm": 0.0023481727112084627,
"learning_rate": 8.026908701435681e-06,
"loss": 0.0001,
"step": 1109
},
{
"epoch": 0.8243594504270331,
"grad_norm": 0.0042024110443890095,
"learning_rate": 7.96116788454485e-06,
"loss": 0.0002,
"step": 1110
},
{
"epoch": 0.8251021165985889,
"grad_norm": 0.001470068353228271,
"learning_rate": 7.895674092235395e-06,
"loss": 0.0001,
"step": 1111
},
{
"epoch": 0.8258447827701448,
"grad_norm": 0.0019429409876465797,
"learning_rate": 7.830427709355725e-06,
"loss": 0.0002,
"step": 1112
},
{
"epoch": 0.8265874489417007,
"grad_norm": 0.006123952101916075,
"learning_rate": 7.765429119300394e-06,
"loss": 0.0003,
"step": 1113
},
{
"epoch": 0.8273301151132566,
"grad_norm": 0.0007870638510212302,
"learning_rate": 7.700678704007947e-06,
"loss": 0.0001,
"step": 1114
},
{
"epoch": 0.8280727812848124,
"grad_norm": 0.0008543855510652065,
"learning_rate": 7.636176843958598e-06,
"loss": 0.0001,
"step": 1115
},
{
"epoch": 0.8288154474563684,
"grad_norm": 0.010817396454513073,
"learning_rate": 7.571923918172053e-06,
"loss": 0.0002,
"step": 1116
},
{
"epoch": 0.8295581136279242,
"grad_norm": 0.0015762196853756905,
"learning_rate": 7.507920304205202e-06,
"loss": 0.0002,
"step": 1117
},
{
"epoch": 0.8303007797994801,
"grad_norm": 0.0009204100351780653,
"learning_rate": 7.444166378150013e-06,
"loss": 0.0001,
"step": 1118
},
{
"epoch": 0.831043445971036,
"grad_norm": 0.0015323157422244549,
"learning_rate": 7.3806625146312205e-06,
"loss": 0.0002,
"step": 1119
},
{
"epoch": 0.8317861121425919,
"grad_norm": 0.001088933553546667,
"learning_rate": 7.317409086804189e-06,
"loss": 0.0001,
"step": 1120
},
{
"epoch": 0.8325287783141478,
"grad_norm": 0.002421481069177389,
"learning_rate": 7.2544064663526815e-06,
"loss": 0.0002,
"step": 1121
},
{
"epoch": 0.8332714444857037,
"grad_norm": 0.0009983620839193463,
"learning_rate": 7.191655023486682e-06,
"loss": 0.0001,
"step": 1122
},
{
"epoch": 0.8340141106572596,
"grad_norm": 0.003855592804029584,
"learning_rate": 7.129155126940268e-06,
"loss": 0.0002,
"step": 1123
},
{
"epoch": 0.8347567768288154,
"grad_norm": 0.003245392581447959,
"learning_rate": 7.066907143969353e-06,
"loss": 0.0002,
"step": 1124
},
{
"epoch": 0.8354994430003714,
"grad_norm": 0.005832053255289793,
"learning_rate": 7.004911440349615e-06,
"loss": 0.0003,
"step": 1125
},
{
"epoch": 0.8362421091719272,
"grad_norm": 0.0075843255035579205,
"learning_rate": 6.94316838037431e-06,
"loss": 0.0002,
"step": 1126
},
{
"epoch": 0.8369847753434831,
"grad_norm": 0.019108982756733894,
"learning_rate": 6.881678326852136e-06,
"loss": 0.0005,
"step": 1127
},
{
"epoch": 0.837727441515039,
"grad_norm": 0.0029295405838638544,
"learning_rate": 6.820441641105063e-06,
"loss": 0.0003,
"step": 1128
},
{
"epoch": 0.8384701076865949,
"grad_norm": 0.0009995042346417904,
"learning_rate": 6.759458682966296e-06,
"loss": 0.0001,
"step": 1129
},
{
"epoch": 0.8392127738581507,
"grad_norm": 0.0011027619475498796,
"learning_rate": 6.698729810778065e-06,
"loss": 0.0002,
"step": 1130
},
{
"epoch": 0.8399554400297067,
"grad_norm": 0.0015378465177491307,
"learning_rate": 6.6382553813896075e-06,
"loss": 0.0001,
"step": 1131
},
{
"epoch": 0.8406981062012625,
"grad_norm": 0.0011789267882704735,
"learning_rate": 6.578035750154976e-06,
"loss": 0.0001,
"step": 1132
},
{
"epoch": 0.8414407723728184,
"grad_norm": 0.003139357315376401,
"learning_rate": 6.518071270931059e-06,
"loss": 0.0002,
"step": 1133
},
{
"epoch": 0.8421834385443743,
"grad_norm": 0.004962114151567221,
"learning_rate": 6.458362296075399e-06,
"loss": 0.0003,
"step": 1134
},
{
"epoch": 0.8429261047159302,
"grad_norm": 0.0013825440546497703,
"learning_rate": 6.398909176444207e-06,
"loss": 0.0001,
"step": 1135
},
{
"epoch": 0.843668770887486,
"grad_norm": 0.0008226705831475556,
"learning_rate": 6.339712261390213e-06,
"loss": 0.0001,
"step": 1136
},
{
"epoch": 0.844411437059042,
"grad_norm": 0.00134433398488909,
"learning_rate": 6.280771898760718e-06,
"loss": 0.0001,
"step": 1137
},
{
"epoch": 0.8451541032305978,
"grad_norm": 0.0013929035048931837,
"learning_rate": 6.222088434895462e-06,
"loss": 0.0001,
"step": 1138
},
{
"epoch": 0.8458967694021537,
"grad_norm": 0.00138406315818429,
"learning_rate": 6.163662214624616e-06,
"loss": 0.0002,
"step": 1139
},
{
"epoch": 0.8466394355737096,
"grad_norm": 0.0019744529854506254,
"learning_rate": 6.1054935812667855e-06,
"loss": 0.0002,
"step": 1140
},
{
"epoch": 0.8473821017452655,
"grad_norm": 0.0007763861794956028,
"learning_rate": 6.047582876626934e-06,
"loss": 0.0001,
"step": 1141
},
{
"epoch": 0.8481247679168213,
"grad_norm": 0.0008731714333407581,
"learning_rate": 5.989930440994451e-06,
"loss": 0.0001,
"step": 1142
},
{
"epoch": 0.8488674340883773,
"grad_norm": 0.0010884518269449472,
"learning_rate": 5.932536613141065e-06,
"loss": 0.0001,
"step": 1143
},
{
"epoch": 0.8496101002599331,
"grad_norm": 0.004011042881757021,
"learning_rate": 5.875401730318947e-06,
"loss": 0.0002,
"step": 1144
},
{
"epoch": 0.850352766431489,
"grad_norm": 0.0016273773508146405,
"learning_rate": 5.818526128258622e-06,
"loss": 0.0002,
"step": 1145
},
{
"epoch": 0.8510954326030449,
"grad_norm": 0.001861872267909348,
"learning_rate": 5.7619101411671095e-06,
"loss": 0.0001,
"step": 1146
},
{
"epoch": 0.8518380987746008,
"grad_norm": 0.0037269063759595156,
"learning_rate": 5.7055541017258855e-06,
"loss": 0.0003,
"step": 1147
},
{
"epoch": 0.8525807649461568,
"grad_norm": 0.0021908970084041357,
"learning_rate": 5.649458341088915e-06,
"loss": 0.0002,
"step": 1148
},
{
"epoch": 0.8533234311177126,
"grad_norm": 0.002067559864372015,
"learning_rate": 5.593623188880781e-06,
"loss": 0.0002,
"step": 1149
},
{
"epoch": 0.8540660972892685,
"grad_norm": 0.004860437475144863,
"learning_rate": 5.538048973194698e-06,
"loss": 0.0002,
"step": 1150
},
{
"epoch": 0.8548087634608244,
"grad_norm": 0.00816015899181366,
"learning_rate": 5.482736020590551e-06,
"loss": 0.0002,
"step": 1151
},
{
"epoch": 0.8555514296323803,
"grad_norm": 0.23404280841350555,
"learning_rate": 5.427684656093074e-06,
"loss": 0.0006,
"step": 1152
},
{
"epoch": 0.8562940958039361,
"grad_norm": 0.0022261200938373804,
"learning_rate": 5.372895203189837e-06,
"loss": 0.0002,
"step": 1153
},
{
"epoch": 0.8570367619754921,
"grad_norm": 0.001256439252756536,
"learning_rate": 5.318367983829392e-06,
"loss": 0.0001,
"step": 1154
},
{
"epoch": 0.8577794281470479,
"grad_norm": 0.0017608028138056397,
"learning_rate": 5.2641033184194165e-06,
"loss": 0.0002,
"step": 1155
},
{
"epoch": 0.8585220943186038,
"grad_norm": 0.0018713950412347913,
"learning_rate": 5.210101525824746e-06,
"loss": 0.0001,
"step": 1156
},
{
"epoch": 0.8592647604901597,
"grad_norm": 0.0007540153455920517,
"learning_rate": 5.156362923365588e-06,
"loss": 0.0001,
"step": 1157
},
{
"epoch": 0.8600074266617156,
"grad_norm": 0.0012859681155532598,
"learning_rate": 5.102887826815589e-06,
"loss": 0.0001,
"step": 1158
},
{
"epoch": 0.8607500928332714,
"grad_norm": 0.0008075116202235222,
"learning_rate": 5.049676550400034e-06,
"loss": 0.0001,
"step": 1159
},
{
"epoch": 0.8614927590048274,
"grad_norm": 0.0007994707557372749,
"learning_rate": 4.996729406793943e-06,
"loss": 0.0001,
"step": 1160
},
{
"epoch": 0.8622354251763832,
"grad_norm": 0.004248554352670908,
"learning_rate": 4.944046707120287e-06,
"loss": 0.0002,
"step": 1161
},
{
"epoch": 0.8629780913479391,
"grad_norm": 0.0005359674105420709,
"learning_rate": 4.891628760948114e-06,
"loss": 0.0001,
"step": 1162
},
{
"epoch": 0.863720757519495,
"grad_norm": 0.0008803227683529258,
"learning_rate": 4.839475876290789e-06,
"loss": 0.0001,
"step": 1163
},
{
"epoch": 0.8644634236910509,
"grad_norm": 0.0011372326407581568,
"learning_rate": 4.787588359604106e-06,
"loss": 0.0002,
"step": 1164
},
{
"epoch": 0.8652060898626067,
"grad_norm": 0.004412113688886166,
"learning_rate": 4.735966515784562e-06,
"loss": 0.0002,
"step": 1165
},
{
"epoch": 0.8659487560341627,
"grad_norm": 0.0014664451591670513,
"learning_rate": 4.684610648167503e-06,
"loss": 0.0002,
"step": 1166
},
{
"epoch": 0.8666914222057185,
"grad_norm": 0.00369854923337698,
"learning_rate": 4.633521058525408e-06,
"loss": 0.0002,
"step": 1167
},
{
"epoch": 0.8674340883772744,
"grad_norm": 0.0015638495096936822,
"learning_rate": 4.582698047066036e-06,
"loss": 0.0001,
"step": 1168
},
{
"epoch": 0.8681767545488303,
"grad_norm": 0.026589730754494667,
"learning_rate": 4.532141912430743e-06,
"loss": 0.0005,
"step": 1169
},
{
"epoch": 0.8689194207203862,
"grad_norm": 0.0014144079759716988,
"learning_rate": 4.4818529516926726e-06,
"loss": 0.0001,
"step": 1170
},
{
"epoch": 0.869662086891942,
"grad_norm": 0.0023864826653152704,
"learning_rate": 4.4318314603550074e-06,
"loss": 0.0002,
"step": 1171
},
{
"epoch": 0.870404753063498,
"grad_norm": 0.00443720119073987,
"learning_rate": 4.382077732349299e-06,
"loss": 0.0002,
"step": 1172
},
{
"epoch": 0.8711474192350538,
"grad_norm": 0.0019203081028535962,
"learning_rate": 4.332592060033636e-06,
"loss": 0.0001,
"step": 1173
},
{
"epoch": 0.8718900854066097,
"grad_norm": 0.0016560673248022795,
"learning_rate": 4.283374734191037e-06,
"loss": 0.0002,
"step": 1174
},
{
"epoch": 0.8726327515781657,
"grad_norm": 0.0018693654565140605,
"learning_rate": 4.234426044027645e-06,
"loss": 0.0001,
"step": 1175
},
{
"epoch": 0.8733754177497215,
"grad_norm": 0.041886404156684875,
"learning_rate": 4.18574627717111e-06,
"loss": 0.0008,
"step": 1176
},
{
"epoch": 0.8741180839212774,
"grad_norm": 0.0033249545376747847,
"learning_rate": 4.137335719668822e-06,
"loss": 0.0001,
"step": 1177
},
{
"epoch": 0.8748607500928333,
"grad_norm": 0.0029907547868788242,
"learning_rate": 4.089194655986306e-06,
"loss": 0.0002,
"step": 1178
},
{
"epoch": 0.8756034162643892,
"grad_norm": 0.0019523641094565392,
"learning_rate": 4.041323369005479e-06,
"loss": 0.0002,
"step": 1179
},
{
"epoch": 0.876346082435945,
"grad_norm": 0.0027865059673786163,
"learning_rate": 3.9937221400230484e-06,
"loss": 0.0001,
"step": 1180
},
{
"epoch": 0.877088748607501,
"grad_norm": 0.0016474600415676832,
"learning_rate": 3.946391248748821e-06,
"loss": 0.0002,
"step": 1181
},
{
"epoch": 0.8778314147790568,
"grad_norm": 0.0016686657909303904,
"learning_rate": 3.899330973304083e-06,
"loss": 0.0001,
"step": 1182
},
{
"epoch": 0.8785740809506127,
"grad_norm": 0.03477117419242859,
"learning_rate": 3.8525415902199154e-06,
"loss": 0.0013,
"step": 1183
},
{
"epoch": 0.8793167471221686,
"grad_norm": 0.000568350194953382,
"learning_rate": 3.8060233744356633e-06,
"loss": 0.0001,
"step": 1184
},
{
"epoch": 0.8800594132937245,
"grad_norm": 0.0022050561383366585,
"learning_rate": 3.7597765992972267e-06,
"loss": 0.0002,
"step": 1185
},
{
"epoch": 0.8808020794652803,
"grad_norm": 0.0006907075876370072,
"learning_rate": 3.7138015365554833e-06,
"loss": 0.0001,
"step": 1186
},
{
"epoch": 0.8815447456368363,
"grad_norm": 0.0008904426940716803,
"learning_rate": 3.6680984563647434e-06,
"loss": 0.0001,
"step": 1187
},
{
"epoch": 0.8822874118083921,
"grad_norm": 0.0031244743149727583,
"learning_rate": 3.6226676272810735e-06,
"loss": 0.0002,
"step": 1188
},
{
"epoch": 0.883030077979948,
"grad_norm": 0.0016301183495670557,
"learning_rate": 3.5775093162607963e-06,
"loss": 0.0001,
"step": 1189
},
{
"epoch": 0.8837727441515039,
"grad_norm": 0.006416819058358669,
"learning_rate": 3.5326237886588732e-06,
"loss": 0.0003,
"step": 1190
},
{
"epoch": 0.8845154103230598,
"grad_norm": 0.0009310771129094064,
"learning_rate": 3.488011308227379e-06,
"loss": 0.0001,
"step": 1191
},
{
"epoch": 0.8852580764946156,
"grad_norm": 0.00095487164799124,
"learning_rate": 3.443672137113901e-06,
"loss": 0.0001,
"step": 1192
},
{
"epoch": 0.8860007426661716,
"grad_norm": 0.0011116062523797154,
"learning_rate": 3.3996065358600782e-06,
"loss": 0.0001,
"step": 1193
},
{
"epoch": 0.8867434088377274,
"grad_norm": 0.0024930539075285196,
"learning_rate": 3.3558147633999728e-06,
"loss": 0.0002,
"step": 1194
},
{
"epoch": 0.8874860750092833,
"grad_norm": 0.0017195651307702065,
"learning_rate": 3.3122970770586514e-06,
"loss": 0.0001,
"step": 1195
},
{
"epoch": 0.8882287411808392,
"grad_norm": 0.0015204905066639185,
"learning_rate": 3.269053732550581e-06,
"loss": 0.0002,
"step": 1196
},
{
"epoch": 0.8889714073523951,
"grad_norm": 0.005054555833339691,
"learning_rate": 3.226084983978195e-06,
"loss": 0.0003,
"step": 1197
},
{
"epoch": 0.8897140735239509,
"grad_norm": 0.0018111229874193668,
"learning_rate": 3.183391083830345e-06,
"loss": 0.0001,
"step": 1198
},
{
"epoch": 0.8904567396955069,
"grad_norm": 0.026347478851675987,
"learning_rate": 3.1409722829808863e-06,
"loss": 0.0005,
"step": 1199
},
{
"epoch": 0.8911994058670628,
"grad_norm": 0.002853583311662078,
"learning_rate": 3.0988288306871115e-06,
"loss": 0.0002,
"step": 1200
},
{
"epoch": 0.8919420720386186,
"grad_norm": 0.0017346449894830585,
"learning_rate": 3.0569609745883743e-06,
"loss": 0.0001,
"step": 1201
},
{
"epoch": 0.8926847382101746,
"grad_norm": 0.31705546379089355,
"learning_rate": 3.0153689607045845e-06,
"loss": 0.0023,
"step": 1202
},
{
"epoch": 0.8934274043817304,
"grad_norm": 0.2009972184896469,
"learning_rate": 2.9740530334347594e-06,
"loss": 0.0012,
"step": 1203
},
{
"epoch": 0.8941700705532863,
"grad_norm": 0.009426708333194256,
"learning_rate": 2.933013435555637e-06,
"loss": 0.0003,
"step": 1204
},
{
"epoch": 0.8949127367248422,
"grad_norm": 0.0008448066073469818,
"learning_rate": 2.8922504082201764e-06,
"loss": 0.0001,
"step": 1205
},
{
"epoch": 0.8956554028963981,
"grad_norm": 0.0007227634778246284,
"learning_rate": 2.8517641909562077e-06,
"loss": 0.0001,
"step": 1206
},
{
"epoch": 0.8963980690679539,
"grad_norm": 0.007234448567032814,
"learning_rate": 2.811555021664969e-06,
"loss": 0.0003,
"step": 1207
},
{
"epoch": 0.8971407352395099,
"grad_norm": 0.003920075949281454,
"learning_rate": 2.7716231366197663e-06,
"loss": 0.0002,
"step": 1208
},
{
"epoch": 0.8978834014110657,
"grad_norm": 0.0014649624936282635,
"learning_rate": 2.7319687704645326e-06,
"loss": 0.0001,
"step": 1209
},
{
"epoch": 0.8986260675826216,
"grad_norm": 0.0005429857992567122,
"learning_rate": 2.692592156212487e-06,
"loss": 0.0001,
"step": 1210
},
{
"epoch": 0.8993687337541775,
"grad_norm": 0.004393384791910648,
"learning_rate": 2.653493525244721e-06,
"loss": 0.0002,
"step": 1211
},
{
"epoch": 0.9001113999257334,
"grad_norm": 0.0011112246429547668,
"learning_rate": 2.614673107308896e-06,
"loss": 0.0001,
"step": 1212
},
{
"epoch": 0.9008540660972892,
"grad_norm": 0.0010931448778137565,
"learning_rate": 2.5761311305178614e-06,
"loss": 0.0001,
"step": 1213
},
{
"epoch": 0.9015967322688452,
"grad_norm": 0.0010206797160208225,
"learning_rate": 2.5378678213483054e-06,
"loss": 0.0001,
"step": 1214
},
{
"epoch": 0.902339398440401,
"grad_norm": 0.0024138889275491238,
"learning_rate": 2.499883404639436e-06,
"loss": 0.0002,
"step": 1215
},
{
"epoch": 0.903082064611957,
"grad_norm": 0.0011341659119352698,
"learning_rate": 2.462178103591678e-06,
"loss": 0.0001,
"step": 1216
},
{
"epoch": 0.9038247307835128,
"grad_norm": 0.0011666857171803713,
"learning_rate": 2.424752139765318e-06,
"loss": 0.0001,
"step": 1217
},
{
"epoch": 0.9045673969550687,
"grad_norm": 0.0019645995926111937,
"learning_rate": 2.3876057330792346e-06,
"loss": 0.0002,
"step": 1218
},
{
"epoch": 0.9053100631266245,
"grad_norm": 0.0026836565230041742,
"learning_rate": 2.350739101809624e-06,
"loss": 0.0003,
"step": 1219
},
{
"epoch": 0.9060527292981805,
"grad_norm": 0.0015338718658313155,
"learning_rate": 2.314152462588659e-06,
"loss": 0.0001,
"step": 1220
},
{
"epoch": 0.9067953954697363,
"grad_norm": 0.007026324979960918,
"learning_rate": 2.2778460304032634e-06,
"loss": 0.0003,
"step": 1221
},
{
"epoch": 0.9075380616412922,
"grad_norm": 0.0013827058719471097,
"learning_rate": 2.2418200185938487e-06,
"loss": 0.0002,
"step": 1222
},
{
"epoch": 0.9082807278128481,
"grad_norm": 0.0026905061677098274,
"learning_rate": 2.206074638853023e-06,
"loss": 0.0002,
"step": 1223
},
{
"epoch": 0.909023393984404,
"grad_norm": 0.001551009714603424,
"learning_rate": 2.170610101224385e-06,
"loss": 0.0001,
"step": 1224
},
{
"epoch": 0.9097660601559598,
"grad_norm": 0.004318626597523689,
"learning_rate": 2.1354266141012824e-06,
"loss": 0.0002,
"step": 1225
},
{
"epoch": 0.9105087263275158,
"grad_norm": 0.002215152606368065,
"learning_rate": 2.100524384225555e-06,
"loss": 0.0001,
"step": 1226
},
{
"epoch": 0.9112513924990717,
"grad_norm": 0.011130552738904953,
"learning_rate": 2.0659036166863766e-06,
"loss": 0.0004,
"step": 1227
},
{
"epoch": 0.9119940586706275,
"grad_norm": 0.0008930191397666931,
"learning_rate": 2.0315645149189935e-06,
"loss": 0.0001,
"step": 1228
},
{
"epoch": 0.9127367248421835,
"grad_norm": 0.0026403944939374924,
"learning_rate": 1.99750728070357e-06,
"loss": 0.0002,
"step": 1229
},
{
"epoch": 0.9134793910137393,
"grad_norm": 0.0009700857335701585,
"learning_rate": 1.9637321141639743e-06,
"loss": 0.0001,
"step": 1230
},
{
"epoch": 0.9142220571852953,
"grad_norm": 0.045662786811590195,
"learning_rate": 1.930239213766638e-06,
"loss": 0.0009,
"step": 1231
},
{
"epoch": 0.9149647233568511,
"grad_norm": 0.02511802315711975,
"learning_rate": 1.8970287763193429e-06,
"loss": 0.0007,
"step": 1232
},
{
"epoch": 0.915707389528407,
"grad_norm": 0.001158213010057807,
"learning_rate": 1.8641009969700917e-06,
"loss": 0.0001,
"step": 1233
},
{
"epoch": 0.9164500556999629,
"grad_norm": 0.00307093164883554,
"learning_rate": 1.8314560692059835e-06,
"loss": 0.0002,
"step": 1234
},
{
"epoch": 0.9171927218715188,
"grad_norm": 0.0006863306043669581,
"learning_rate": 1.799094184852018e-06,
"loss": 0.0001,
"step": 1235
},
{
"epoch": 0.9179353880430746,
"grad_norm": 0.001991459634155035,
"learning_rate": 1.7670155340700323e-06,
"loss": 0.0002,
"step": 1236
},
{
"epoch": 0.9186780542146306,
"grad_norm": 0.0033494269009679556,
"learning_rate": 1.7352203053575157e-06,
"loss": 0.0003,
"step": 1237
},
{
"epoch": 0.9194207203861864,
"grad_norm": 0.0011704204371199012,
"learning_rate": 1.70370868554659e-06,
"loss": 0.0001,
"step": 1238
},
{
"epoch": 0.9201633865577423,
"grad_norm": 0.000724752142559737,
"learning_rate": 1.6724808598028108e-06,
"loss": 0.0001,
"step": 1239
},
{
"epoch": 0.9209060527292982,
"grad_norm": 0.002084789564833045,
"learning_rate": 1.6415370116241724e-06,
"loss": 0.0001,
"step": 1240
},
{
"epoch": 0.9216487189008541,
"grad_norm": 0.0011653484543785453,
"learning_rate": 1.6108773228399543e-06,
"loss": 0.0001,
"step": 1241
},
{
"epoch": 0.9223913850724099,
"grad_norm": 0.001394697930663824,
"learning_rate": 1.5805019736097104e-06,
"loss": 0.0002,
"step": 1242
},
{
"epoch": 0.9231340512439659,
"grad_norm": 0.0010144009720534086,
"learning_rate": 1.5504111424221646e-06,
"loss": 0.0001,
"step": 1243
},
{
"epoch": 0.9238767174155217,
"grad_norm": 0.012748640961945057,
"learning_rate": 1.5206050060942e-06,
"loss": 0.0001,
"step": 1244
},
{
"epoch": 0.9246193835870776,
"grad_norm": 0.0012531749671325088,
"learning_rate": 1.4910837397697886e-06,
"loss": 0.0001,
"step": 1245
},
{
"epoch": 0.9253620497586335,
"grad_norm": 0.0027611353434622288,
"learning_rate": 1.4618475169190017e-06,
"loss": 0.0002,
"step": 1246
},
{
"epoch": 0.9261047159301894,
"grad_norm": 0.0028265246655792,
"learning_rate": 1.4328965093369283e-06,
"loss": 0.0001,
"step": 1247
},
{
"epoch": 0.9268473821017452,
"grad_norm": 0.0014653587713837624,
"learning_rate": 1.4042308871427324e-06,
"loss": 0.0001,
"step": 1248
},
{
"epoch": 0.9275900482733012,
"grad_norm": 0.0027727442793548107,
"learning_rate": 1.3758508187786068e-06,
"loss": 0.0002,
"step": 1249
},
{
"epoch": 0.928332714444857,
"grad_norm": 0.003438390791416168,
"learning_rate": 1.3477564710088098e-06,
"loss": 0.0002,
"step": 1250
},
{
"epoch": 0.9290753806164129,
"grad_norm": 0.0032505146227777004,
"learning_rate": 1.3199480089186756e-06,
"loss": 0.0002,
"step": 1251
},
{
"epoch": 0.9298180467879688,
"grad_norm": 0.005036122631281614,
"learning_rate": 1.2924255959136266e-06,
"loss": 0.0002,
"step": 1252
},
{
"epoch": 0.9305607129595247,
"grad_norm": 0.011345870792865753,
"learning_rate": 1.2651893937182635e-06,
"loss": 0.0003,
"step": 1253
},
{
"epoch": 0.9313033791310806,
"grad_norm": 0.0007504152599722147,
"learning_rate": 1.2382395623753485e-06,
"loss": 0.0001,
"step": 1254
},
{
"epoch": 0.9320460453026365,
"grad_norm": 0.0009204870439134538,
"learning_rate": 1.2115762602449177e-06,
"loss": 0.0001,
"step": 1255
},
{
"epoch": 0.9327887114741924,
"grad_norm": 0.021037423983216286,
"learning_rate": 1.1851996440033319e-06,
"loss": 0.0007,
"step": 1256
},
{
"epoch": 0.9335313776457482,
"grad_norm": 0.05482451990246773,
"learning_rate": 1.1591098686423496e-06,
"loss": 0.0006,
"step": 1257
},
{
"epoch": 0.9342740438173042,
"grad_norm": 0.0006609958363696933,
"learning_rate": 1.1333070874682216e-06,
"loss": 0.0001,
"step": 1258
},
{
"epoch": 0.93501670998886,
"grad_norm": 0.0011097380192950368,
"learning_rate": 1.1077914521007981e-06,
"loss": 0.0002,
"step": 1259
},
{
"epoch": 0.9357593761604159,
"grad_norm": 0.0019438870949670672,
"learning_rate": 1.0825631124726232e-06,
"loss": 0.0001,
"step": 1260
},
{
"epoch": 0.9365020423319718,
"grad_norm": 0.0007544804248027503,
"learning_rate": 1.0576222168280636e-06,
"loss": 0.0001,
"step": 1261
},
{
"epoch": 0.9372447085035277,
"grad_norm": 0.007262110244482756,
"learning_rate": 1.0329689117224262e-06,
"loss": 0.0004,
"step": 1262
},
{
"epoch": 0.9379873746750835,
"grad_norm": 0.0008670915849506855,
"learning_rate": 1.008603342021125e-06,
"loss": 0.0001,
"step": 1263
},
{
"epoch": 0.9387300408466395,
"grad_norm": 0.0009119977476075292,
"learning_rate": 9.845256508987989e-07,
"loss": 0.0001,
"step": 1264
},
{
"epoch": 0.9394727070181953,
"grad_norm": 0.00047243438893929124,
"learning_rate": 9.607359798384785e-07,
"loss": 0.0001,
"step": 1265
},
{
"epoch": 0.9402153731897512,
"grad_norm": 0.0028327691834419966,
"learning_rate": 9.372344686307655e-07,
"loss": 0.0002,
"step": 1266
},
{
"epoch": 0.9409580393613071,
"grad_norm": 0.044161271303892136,
"learning_rate": 9.140212553730043e-07,
"loss": 0.0016,
"step": 1267
},
{
"epoch": 0.941700705532863,
"grad_norm": 0.3684042692184448,
"learning_rate": 8.910964764684671e-07,
"loss": 0.0089,
"step": 1268
},
{
"epoch": 0.9424433717044188,
"grad_norm": 0.002094242488965392,
"learning_rate": 8.684602666255481e-07,
"loss": 0.0001,
"step": 1269
},
{
"epoch": 0.9431860378759748,
"grad_norm": 0.0017989203333854675,
"learning_rate": 8.46112758857004e-07,
"loss": 0.0002,
"step": 1270
},
{
"epoch": 0.9439287040475306,
"grad_norm": 0.0011526981834322214,
"learning_rate": 8.240540844791145e-07,
"loss": 0.0001,
"step": 1271
},
{
"epoch": 0.9446713702190865,
"grad_norm": 0.0008104105945676565,
"learning_rate": 8.022843731109674e-07,
"loss": 0.0001,
"step": 1272
},
{
"epoch": 0.9454140363906424,
"grad_norm": 0.0018936687847599387,
"learning_rate": 7.8080375267367e-07,
"loss": 0.0002,
"step": 1273
},
{
"epoch": 0.9461567025621983,
"grad_norm": 0.0015682062366977334,
"learning_rate": 7.596123493895991e-07,
"loss": 0.0001,
"step": 1274
},
{
"epoch": 0.9468993687337541,
"grad_norm": 0.0024748966097831726,
"learning_rate": 7.387102877816521e-07,
"loss": 0.0002,
"step": 1275
},
{
"epoch": 0.9476420349053101,
"grad_norm": 0.0018025932367891073,
"learning_rate": 7.180976906725423e-07,
"loss": 0.0001,
"step": 1276
},
{
"epoch": 0.9483847010768659,
"grad_norm": 0.0015493928221985698,
"learning_rate": 6.977746791840434e-07,
"loss": 0.0002,
"step": 1277
},
{
"epoch": 0.9491273672484218,
"grad_norm": 0.004556954838335514,
"learning_rate": 6.777413727363069e-07,
"loss": 0.0002,
"step": 1278
},
{
"epoch": 0.9498700334199777,
"grad_norm": 0.018575742840766907,
"learning_rate": 6.579978890471294e-07,
"loss": 0.0003,
"step": 1279
},
{
"epoch": 0.9506126995915336,
"grad_norm": 0.002247869735583663,
"learning_rate": 6.385443441312978e-07,
"loss": 0.0002,
"step": 1280
},
{
"epoch": 0.9513553657630895,
"grad_norm": 0.0014160757418721914,
"learning_rate": 6.193808522998723e-07,
"loss": 0.0001,
"step": 1281
},
{
"epoch": 0.9520980319346454,
"grad_norm": 0.0015209565171971917,
"learning_rate": 6.005075261595494e-07,
"loss": 0.0002,
"step": 1282
},
{
"epoch": 0.9528406981062013,
"grad_norm": 0.0009286428103223443,
"learning_rate": 5.81924476611967e-07,
"loss": 0.0001,
"step": 1283
},
{
"epoch": 0.9535833642777571,
"grad_norm": 0.013380018062889576,
"learning_rate": 5.63631812853066e-07,
"loss": 0.0002,
"step": 1284
},
{
"epoch": 0.9543260304493131,
"grad_norm": 0.00046919341548345983,
"learning_rate": 5.456296423724527e-07,
"loss": 0.0001,
"step": 1285
},
{
"epoch": 0.9550686966208689,
"grad_norm": 0.001202006940729916,
"learning_rate": 5.279180709527765e-07,
"loss": 0.0001,
"step": 1286
},
{
"epoch": 0.9558113627924248,
"grad_norm": 0.0014856454217806458,
"learning_rate": 5.104972026690802e-07,
"loss": 0.0001,
"step": 1287
},
{
"epoch": 0.9565540289639807,
"grad_norm": 0.0008986307075247169,
"learning_rate": 4.933671398882067e-07,
"loss": 0.0001,
"step": 1288
},
{
"epoch": 0.9572966951355366,
"grad_norm": 0.0012058410793542862,
"learning_rate": 4.765279832682101e-07,
"loss": 0.0001,
"step": 1289
},
{
"epoch": 0.9580393613070924,
"grad_norm": 0.003975100349634886,
"learning_rate": 4.5997983175773417e-07,
"loss": 0.0002,
"step": 1290
},
{
"epoch": 0.9587820274786484,
"grad_norm": 0.003919736947864294,
"learning_rate": 4.4372278259545155e-07,
"loss": 0.0003,
"step": 1291
},
{
"epoch": 0.9595246936502042,
"grad_norm": 0.019034747034311295,
"learning_rate": 4.277569313094809e-07,
"loss": 0.0006,
"step": 1292
},
{
"epoch": 0.9602673598217601,
"grad_norm": 0.004752116743475199,
"learning_rate": 4.120823717168432e-07,
"loss": 0.0002,
"step": 1293
},
{
"epoch": 0.961010025993316,
"grad_norm": 0.0008414981421083212,
"learning_rate": 3.9669919592288384e-07,
"loss": 0.0001,
"step": 1294
},
{
"epoch": 0.9617526921648719,
"grad_norm": 0.0018463104497641325,
"learning_rate": 3.8160749432076815e-07,
"loss": 0.0002,
"step": 1295
},
{
"epoch": 0.9624953583364277,
"grad_norm": 0.0009904575999826193,
"learning_rate": 3.6680735559090906e-07,
"loss": 0.0001,
"step": 1296
},
{
"epoch": 0.9632380245079837,
"grad_norm": 0.001538589014671743,
"learning_rate": 3.5229886670046783e-07,
"loss": 0.0002,
"step": 1297
},
{
"epoch": 0.9639806906795395,
"grad_norm": 0.0009757834486663342,
"learning_rate": 3.380821129028489e-07,
"loss": 0.0001,
"step": 1298
},
{
"epoch": 0.9647233568510954,
"grad_norm": 0.00236718263477087,
"learning_rate": 3.241571777371888e-07,
"loss": 0.0002,
"step": 1299
},
{
"epoch": 0.9654660230226513,
"grad_norm": 0.00395069969817996,
"learning_rate": 3.1052414302786823e-07,
"loss": 0.0002,
"step": 1300
},
{
"epoch": 0.9662086891942072,
"grad_norm": 0.006602929905056953,
"learning_rate": 2.971830888840177e-07,
"loss": 0.0003,
"step": 1301
},
{
"epoch": 0.966951355365763,
"grad_norm": 0.0023442876990884542,
"learning_rate": 2.8413409369907886e-07,
"loss": 0.0002,
"step": 1302
},
{
"epoch": 0.967694021537319,
"grad_norm": 0.012941988185048103,
"learning_rate": 2.7137723415030516e-07,
"loss": 0.0003,
"step": 1303
},
{
"epoch": 0.9684366877088748,
"grad_norm": 0.005490366835147142,
"learning_rate": 2.589125851983509e-07,
"loss": 0.0002,
"step": 1304
},
{
"epoch": 0.9691793538804307,
"grad_norm": 0.0007061643409542739,
"learning_rate": 2.46740220086783e-07,
"loss": 0.0001,
"step": 1305
},
{
"epoch": 0.9699220200519866,
"grad_norm": 0.0017634638352319598,
"learning_rate": 2.3486021034170857e-07,
"loss": 0.0002,
"step": 1306
},
{
"epoch": 0.9706646862235425,
"grad_norm": 0.0028723692521452904,
"learning_rate": 2.232726257713036e-07,
"loss": 0.0002,
"step": 1307
},
{
"epoch": 0.9714073523950985,
"grad_norm": 0.0016753628151491284,
"learning_rate": 2.1197753446542957e-07,
"loss": 0.0001,
"step": 1308
},
{
"epoch": 0.9721500185666543,
"grad_norm": 0.02156355232000351,
"learning_rate": 2.009750027952284e-07,
"loss": 0.0007,
"step": 1309
},
{
"epoch": 0.9728926847382102,
"grad_norm": 0.0010258769616484642,
"learning_rate": 1.9026509541272275e-07,
"loss": 0.0001,
"step": 1310
},
{
"epoch": 0.973635350909766,
"grad_norm": 0.001431930111721158,
"learning_rate": 1.798478752504551e-07,
"loss": 0.0001,
"step": 1311
},
{
"epoch": 0.974378017081322,
"grad_norm": 0.7691323161125183,
"learning_rate": 1.6972340352110483e-07,
"loss": 0.0318,
"step": 1312
},
{
"epoch": 0.9751206832528778,
"grad_norm": 0.005196771118789911,
"learning_rate": 1.598917397171218e-07,
"loss": 0.0003,
"step": 1313
},
{
"epoch": 0.9758633494244338,
"grad_norm": 0.0011321887141093612,
"learning_rate": 1.503529416103988e-07,
"loss": 0.0002,
"step": 1314
},
{
"epoch": 0.9766060155959896,
"grad_norm": 0.0005147146875970066,
"learning_rate": 1.4110706525190531e-07,
"loss": 0.0001,
"step": 1315
},
{
"epoch": 0.9773486817675455,
"grad_norm": 0.008908793330192566,
"learning_rate": 1.3215416497138754e-07,
"loss": 0.0003,
"step": 1316
},
{
"epoch": 0.9780913479391014,
"grad_norm": 0.002536405110731721,
"learning_rate": 1.234942933770189e-07,
"loss": 0.0002,
"step": 1317
},
{
"epoch": 0.9788340141106573,
"grad_norm": 0.0017400149954482913,
"learning_rate": 1.1512750135511674e-07,
"loss": 0.0001,
"step": 1318
},
{
"epoch": 0.9795766802822131,
"grad_norm": 0.0011383086675778031,
"learning_rate": 1.0705383806982606e-07,
"loss": 0.0001,
"step": 1319
},
{
"epoch": 0.9803193464537691,
"grad_norm": 0.0018386361189186573,
"learning_rate": 9.927335096283629e-08,
"loss": 0.0002,
"step": 1320
},
{
"epoch": 0.9810620126253249,
"grad_norm": 0.0018964792834594846,
"learning_rate": 9.178608575310388e-08,
"loss": 0.0001,
"step": 1321
},
{
"epoch": 0.9818046787968808,
"grad_norm": 0.001377547043375671,
"learning_rate": 8.459208643659122e-08,
"loss": 0.0001,
"step": 1322
},
{
"epoch": 0.9825473449684367,
"grad_norm": 0.0015313441399484873,
"learning_rate": 7.769139528598368e-08,
"loss": 0.0001,
"step": 1323
},
{
"epoch": 0.9832900111399926,
"grad_norm": 0.00252739991992712,
"learning_rate": 7.108405285046749e-08,
"loss": 0.0002,
"step": 1324
},
{
"epoch": 0.9840326773115484,
"grad_norm": 0.0026342032942920923,
"learning_rate": 6.477009795547995e-08,
"loss": 0.0002,
"step": 1325
},
{
"epoch": 0.9847753434831044,
"grad_norm": 0.0036653559654951096,
"learning_rate": 5.8749567702481856e-08,
"loss": 0.0002,
"step": 1326
},
{
"epoch": 0.9855180096546602,
"grad_norm": 0.031000560149550438,
"learning_rate": 5.3022497468735446e-08,
"loss": 0.0012,
"step": 1327
},
{
"epoch": 0.9862606758262161,
"grad_norm": 0.01462267991155386,
"learning_rate": 4.7588920907110094e-08,
"loss": 0.0005,
"step": 1328
},
{
"epoch": 0.987003341997772,
"grad_norm": 0.0009502037428319454,
"learning_rate": 4.244886994587138e-08,
"loss": 0.0001,
"step": 1329
},
{
"epoch": 0.9877460081693279,
"grad_norm": 0.0015973957488313317,
"learning_rate": 3.760237478849793e-08,
"loss": 0.0001,
"step": 1330
},
{
"epoch": 0.9884886743408837,
"grad_norm": 0.0013975318288430572,
"learning_rate": 3.3049463913498166e-08,
"loss": 0.0001,
"step": 1331
},
{
"epoch": 0.9892313405124397,
"grad_norm": 0.06534797698259354,
"learning_rate": 2.8790164074254943e-08,
"loss": 0.0017,
"step": 1332
},
{
"epoch": 0.9899740066839955,
"grad_norm": 0.000878207094501704,
"learning_rate": 2.4824500298858967e-08,
"loss": 0.0001,
"step": 1333
},
{
"epoch": 0.9907166728555514,
"grad_norm": 0.0011413278989493847,
"learning_rate": 2.1152495889970035e-08,
"loss": 0.0001,
"step": 1334
},
{
"epoch": 0.9914593390271074,
"grad_norm": 0.0010686512105166912,
"learning_rate": 1.777417242467272e-08,
"loss": 0.0001,
"step": 1335
},
{
"epoch": 0.9922020051986632,
"grad_norm": 0.001142362249083817,
"learning_rate": 1.4689549754337562e-08,
"loss": 0.0001,
"step": 1336
},
{
"epoch": 0.9929446713702191,
"grad_norm": 0.0008279301691800356,
"learning_rate": 1.189864600454338e-08,
"loss": 0.0001,
"step": 1337
},
{
"epoch": 0.993687337541775,
"grad_norm": 0.0007734330138191581,
"learning_rate": 9.401477574932926e-09,
"loss": 0.0001,
"step": 1338
},
{
"epoch": 0.9944300037133309,
"grad_norm": 0.004689542576670647,
"learning_rate": 7.198059139118529e-09,
"loss": 0.0002,
"step": 1339
},
{
"epoch": 0.9951726698848867,
"grad_norm": 0.0011075050570070744,
"learning_rate": 5.2884036446265714e-09,
"loss": 0.0001,
"step": 1340
},
{
"epoch": 0.9959153360564427,
"grad_norm": 0.0022147931158542633,
"learning_rate": 3.672522312786475e-09,
"loss": 0.0002,
"step": 1341
},
{
"epoch": 0.9966580022279985,
"grad_norm": 0.0008529072511009872,
"learning_rate": 2.3504246386918393e-09,
"loss": 0.0001,
"step": 1342
},
{
"epoch": 0.9974006683995544,
"grad_norm": 0.0028021601028740406,
"learning_rate": 1.3221183911282798e-09,
"loss": 0.0002,
"step": 1343
},
{
"epoch": 0.9981433345711103,
"grad_norm": 0.0028829630464315414,
"learning_rate": 5.876096125234653e-10,
"loss": 0.0001,
"step": 1344
},
{
"epoch": 0.9988860007426662,
"grad_norm": 0.002654945245012641,
"learning_rate": 1.4690261893601786e-10,
"loss": 0.0002,
"step": 1345
},
{
"epoch": 0.999628666914222,
"grad_norm": 0.0032625223975628614,
"learning_rate": 0.0,
"loss": 0.0001,
"step": 1346
}
],
"logging_steps": 1,
"max_steps": 1346,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 337,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.7591268226010972e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}