{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.9950576606260295,
"eval_steps": 500,
"global_step": 909,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0032948929159802307,
"grad_norm": 0.8727272666018974,
"learning_rate": 2.197802197802198e-06,
"loss": 1.2966,
"step": 1
},
{
"epoch": 0.006589785831960461,
"grad_norm": 0.9006388360232964,
"learning_rate": 4.395604395604396e-06,
"loss": 1.3333,
"step": 2
},
{
"epoch": 0.009884678747940691,
"grad_norm": 0.9149925703883882,
"learning_rate": 6.5934065934065935e-06,
"loss": 1.3275,
"step": 3
},
{
"epoch": 0.013179571663920923,
"grad_norm": 0.9362223066609972,
"learning_rate": 8.791208791208792e-06,
"loss": 1.3456,
"step": 4
},
{
"epoch": 0.016474464579901153,
"grad_norm": 0.8362157088104918,
"learning_rate": 1.0989010989010989e-05,
"loss": 1.2551,
"step": 5
},
{
"epoch": 0.019769357495881382,
"grad_norm": 0.8409114192243606,
"learning_rate": 1.3186813186813187e-05,
"loss": 1.2888,
"step": 6
},
{
"epoch": 0.023064250411861616,
"grad_norm": 0.8323916919747559,
"learning_rate": 1.5384615384615387e-05,
"loss": 1.2524,
"step": 7
},
{
"epoch": 0.026359143327841845,
"grad_norm": 0.7826776515036836,
"learning_rate": 1.7582417582417584e-05,
"loss": 1.2206,
"step": 8
},
{
"epoch": 0.029654036243822075,
"grad_norm": 0.7679461778818735,
"learning_rate": 1.978021978021978e-05,
"loss": 1.238,
"step": 9
},
{
"epoch": 0.032948929159802305,
"grad_norm": 0.6218001630318019,
"learning_rate": 2.1978021978021977e-05,
"loss": 1.144,
"step": 10
},
{
"epoch": 0.036243822075782535,
"grad_norm": 0.550844391887277,
"learning_rate": 2.4175824175824177e-05,
"loss": 1.1091,
"step": 11
},
{
"epoch": 0.039538714991762765,
"grad_norm": 0.5376512267561467,
"learning_rate": 2.6373626373626374e-05,
"loss": 1.1048,
"step": 12
},
{
"epoch": 0.042833607907743,
"grad_norm": 0.5061475154817852,
"learning_rate": 2.857142857142857e-05,
"loss": 1.0239,
"step": 13
},
{
"epoch": 0.04612850082372323,
"grad_norm": 0.5440235934771381,
"learning_rate": 3.0769230769230774e-05,
"loss": 0.9955,
"step": 14
},
{
"epoch": 0.04942339373970346,
"grad_norm": 0.5710584188811608,
"learning_rate": 3.296703296703297e-05,
"loss": 0.9453,
"step": 15
},
{
"epoch": 0.05271828665568369,
"grad_norm": 0.5629646386751581,
"learning_rate": 3.516483516483517e-05,
"loss": 0.8922,
"step": 16
},
{
"epoch": 0.05601317957166392,
"grad_norm": 0.5787661460532081,
"learning_rate": 3.7362637362637365e-05,
"loss": 0.853,
"step": 17
},
{
"epoch": 0.05930807248764415,
"grad_norm": 0.5219200821941683,
"learning_rate": 3.956043956043956e-05,
"loss": 0.8057,
"step": 18
},
{
"epoch": 0.06260296540362438,
"grad_norm": 0.4949320198088838,
"learning_rate": 4.1758241758241765e-05,
"loss": 0.7203,
"step": 19
},
{
"epoch": 0.06589785831960461,
"grad_norm": 0.5174702935923171,
"learning_rate": 4.3956043956043955e-05,
"loss": 0.7028,
"step": 20
},
{
"epoch": 0.06919275123558484,
"grad_norm": 0.4399438447558331,
"learning_rate": 4.615384615384616e-05,
"loss": 0.6712,
"step": 21
},
{
"epoch": 0.07248764415156507,
"grad_norm": 0.3850468864162824,
"learning_rate": 4.8351648351648355e-05,
"loss": 0.6205,
"step": 22
},
{
"epoch": 0.0757825370675453,
"grad_norm": 0.327866579681946,
"learning_rate": 5.054945054945055e-05,
"loss": 0.5923,
"step": 23
},
{
"epoch": 0.07907742998352553,
"grad_norm": 0.24857639537783982,
"learning_rate": 5.274725274725275e-05,
"loss": 0.5706,
"step": 24
},
{
"epoch": 0.08237232289950576,
"grad_norm": 0.24508430082340485,
"learning_rate": 5.494505494505495e-05,
"loss": 0.5534,
"step": 25
},
{
"epoch": 0.085667215815486,
"grad_norm": 0.21401457324982626,
"learning_rate": 5.714285714285714e-05,
"loss": 0.5343,
"step": 26
},
{
"epoch": 0.08896210873146623,
"grad_norm": 0.19432840487037037,
"learning_rate": 5.9340659340659345e-05,
"loss": 0.5205,
"step": 27
},
{
"epoch": 0.09225700164744646,
"grad_norm": 0.25188173455273966,
"learning_rate": 6.153846153846155e-05,
"loss": 0.517,
"step": 28
},
{
"epoch": 0.09555189456342669,
"grad_norm": 0.19175435794663873,
"learning_rate": 6.373626373626373e-05,
"loss": 0.5177,
"step": 29
},
{
"epoch": 0.09884678747940692,
"grad_norm": 0.22909359876581833,
"learning_rate": 6.593406593406594e-05,
"loss": 0.507,
"step": 30
},
{
"epoch": 0.10214168039538715,
"grad_norm": 0.21953419980838787,
"learning_rate": 6.813186813186814e-05,
"loss": 0.4935,
"step": 31
},
{
"epoch": 0.10543657331136738,
"grad_norm": 0.20750358706332223,
"learning_rate": 7.032967032967034e-05,
"loss": 0.4987,
"step": 32
},
{
"epoch": 0.10873146622734761,
"grad_norm": 0.19232398536892564,
"learning_rate": 7.252747252747253e-05,
"loss": 0.4993,
"step": 33
},
{
"epoch": 0.11202635914332784,
"grad_norm": 0.1775783275207957,
"learning_rate": 7.472527472527473e-05,
"loss": 0.4854,
"step": 34
},
{
"epoch": 0.11532125205930807,
"grad_norm": 0.14336113752224602,
"learning_rate": 7.692307692307693e-05,
"loss": 0.4659,
"step": 35
},
{
"epoch": 0.1186161449752883,
"grad_norm": 0.14129976475916678,
"learning_rate": 7.912087912087912e-05,
"loss": 0.4634,
"step": 36
},
{
"epoch": 0.12191103789126853,
"grad_norm": 0.12988205851707507,
"learning_rate": 8.131868131868132e-05,
"loss": 0.4648,
"step": 37
},
{
"epoch": 0.12520593080724876,
"grad_norm": 0.13738530280987127,
"learning_rate": 8.351648351648353e-05,
"loss": 0.4772,
"step": 38
},
{
"epoch": 0.128500823723229,
"grad_norm": 0.11545159705042944,
"learning_rate": 8.571428571428571e-05,
"loss": 0.4728,
"step": 39
},
{
"epoch": 0.13179571663920922,
"grad_norm": 0.11033726958873326,
"learning_rate": 8.791208791208791e-05,
"loss": 0.4621,
"step": 40
},
{
"epoch": 0.13509060955518945,
"grad_norm": 0.1201472253052247,
"learning_rate": 9.010989010989012e-05,
"loss": 0.4576,
"step": 41
},
{
"epoch": 0.13838550247116968,
"grad_norm": 0.11436096928615554,
"learning_rate": 9.230769230769232e-05,
"loss": 0.4371,
"step": 42
},
{
"epoch": 0.1416803953871499,
"grad_norm": 0.11149821850019738,
"learning_rate": 9.450549450549451e-05,
"loss": 0.4484,
"step": 43
},
{
"epoch": 0.14497528830313014,
"grad_norm": 0.1058121629816968,
"learning_rate": 9.670329670329671e-05,
"loss": 0.4389,
"step": 44
},
{
"epoch": 0.14827018121911037,
"grad_norm": 0.10501103645473217,
"learning_rate": 9.89010989010989e-05,
"loss": 0.4335,
"step": 45
},
{
"epoch": 0.1515650741350906,
"grad_norm": 0.0964684052416727,
"learning_rate": 0.0001010989010989011,
"loss": 0.4394,
"step": 46
},
{
"epoch": 0.15485996705107083,
"grad_norm": 0.10135506801778058,
"learning_rate": 0.00010329670329670331,
"loss": 0.4284,
"step": 47
},
{
"epoch": 0.15815485996705106,
"grad_norm": 0.10029097763419231,
"learning_rate": 0.0001054945054945055,
"loss": 0.4371,
"step": 48
},
{
"epoch": 0.1614497528830313,
"grad_norm": 0.10052795766573124,
"learning_rate": 0.0001076923076923077,
"loss": 0.4293,
"step": 49
},
{
"epoch": 0.16474464579901152,
"grad_norm": 0.10420997329264613,
"learning_rate": 0.0001098901098901099,
"loss": 0.4279,
"step": 50
},
{
"epoch": 0.16803953871499178,
"grad_norm": 0.1217798536526884,
"learning_rate": 0.0001120879120879121,
"loss": 0.4141,
"step": 51
},
{
"epoch": 0.171334431630972,
"grad_norm": 0.10417010160655436,
"learning_rate": 0.00011428571428571428,
"loss": 0.41,
"step": 52
},
{
"epoch": 0.17462932454695224,
"grad_norm": 0.10296977506815383,
"learning_rate": 0.0001164835164835165,
"loss": 0.4299,
"step": 53
},
{
"epoch": 0.17792421746293247,
"grad_norm": 0.10122420990330976,
"learning_rate": 0.00011868131868131869,
"loss": 0.4134,
"step": 54
},
{
"epoch": 0.1812191103789127,
"grad_norm": 0.09986057816216841,
"learning_rate": 0.00012087912087912087,
"loss": 0.4144,
"step": 55
},
{
"epoch": 0.18451400329489293,
"grad_norm": 0.10113082205051421,
"learning_rate": 0.0001230769230769231,
"loss": 0.4154,
"step": 56
},
{
"epoch": 0.18780889621087316,
"grad_norm": 0.09878123227751724,
"learning_rate": 0.00012527472527472527,
"loss": 0.4181,
"step": 57
},
{
"epoch": 0.19110378912685339,
"grad_norm": 0.10418790342701288,
"learning_rate": 0.00012747252747252746,
"loss": 0.4204,
"step": 58
},
{
"epoch": 0.19439868204283361,
"grad_norm": 0.10614228869769458,
"learning_rate": 0.0001296703296703297,
"loss": 0.428,
"step": 59
},
{
"epoch": 0.19769357495881384,
"grad_norm": 0.09964224792215227,
"learning_rate": 0.00013186813186813188,
"loss": 0.3888,
"step": 60
},
{
"epoch": 0.20098846787479407,
"grad_norm": 0.1060800549771199,
"learning_rate": 0.00013406593406593405,
"loss": 0.4045,
"step": 61
},
{
"epoch": 0.2042833607907743,
"grad_norm": 0.10220349513246392,
"learning_rate": 0.00013626373626373628,
"loss": 0.407,
"step": 62
},
{
"epoch": 0.20757825370675453,
"grad_norm": 0.10499038993641154,
"learning_rate": 0.00013846153846153847,
"loss": 0.4042,
"step": 63
},
{
"epoch": 0.21087314662273476,
"grad_norm": 0.10008875757336905,
"learning_rate": 0.00014065934065934067,
"loss": 0.3968,
"step": 64
},
{
"epoch": 0.214168039538715,
"grad_norm": 0.10767447090788189,
"learning_rate": 0.00014285714285714287,
"loss": 0.4096,
"step": 65
},
{
"epoch": 0.21746293245469522,
"grad_norm": 0.10409029011454787,
"learning_rate": 0.00014505494505494506,
"loss": 0.3925,
"step": 66
},
{
"epoch": 0.22075782537067545,
"grad_norm": 0.10006210224544987,
"learning_rate": 0.00014725274725274726,
"loss": 0.3733,
"step": 67
},
{
"epoch": 0.22405271828665568,
"grad_norm": 0.10154215775645745,
"learning_rate": 0.00014945054945054946,
"loss": 0.4008,
"step": 68
},
{
"epoch": 0.2273476112026359,
"grad_norm": 0.10280194055666982,
"learning_rate": 0.00015164835164835165,
"loss": 0.4074,
"step": 69
},
{
"epoch": 0.23064250411861614,
"grad_norm": 0.10449991568283325,
"learning_rate": 0.00015384615384615385,
"loss": 0.4007,
"step": 70
},
{
"epoch": 0.23393739703459637,
"grad_norm": 0.09867099523694993,
"learning_rate": 0.00015604395604395605,
"loss": 0.3945,
"step": 71
},
{
"epoch": 0.2372322899505766,
"grad_norm": 0.10364637285833565,
"learning_rate": 0.00015824175824175824,
"loss": 0.3756,
"step": 72
},
{
"epoch": 0.24052718286655683,
"grad_norm": 0.10500702289024652,
"learning_rate": 0.00016043956043956044,
"loss": 0.3976,
"step": 73
},
{
"epoch": 0.24382207578253706,
"grad_norm": 0.10508505128672692,
"learning_rate": 0.00016263736263736264,
"loss": 0.3936,
"step": 74
},
{
"epoch": 0.2471169686985173,
"grad_norm": 0.11077255112820204,
"learning_rate": 0.00016483516483516484,
"loss": 0.3844,
"step": 75
},
{
"epoch": 0.2504118616144975,
"grad_norm": 0.11497427991652769,
"learning_rate": 0.00016703296703296706,
"loss": 0.4002,
"step": 76
},
{
"epoch": 0.25370675453047775,
"grad_norm": 0.10361858110894616,
"learning_rate": 0.00016923076923076923,
"loss": 0.3877,
"step": 77
},
{
"epoch": 0.257001647446458,
"grad_norm": 0.10689216566545769,
"learning_rate": 0.00017142857142857143,
"loss": 0.375,
"step": 78
},
{
"epoch": 0.2602965403624382,
"grad_norm": 0.10379603284636893,
"learning_rate": 0.00017362637362637365,
"loss": 0.3855,
"step": 79
},
{
"epoch": 0.26359143327841844,
"grad_norm": 0.11008235355031443,
"learning_rate": 0.00017582417582417582,
"loss": 0.3928,
"step": 80
},
{
"epoch": 0.26688632619439867,
"grad_norm": 0.10801759571590709,
"learning_rate": 0.00017802197802197802,
"loss": 0.3796,
"step": 81
},
{
"epoch": 0.2701812191103789,
"grad_norm": 0.1072207208697183,
"learning_rate": 0.00018021978021978024,
"loss": 0.3806,
"step": 82
},
{
"epoch": 0.27347611202635913,
"grad_norm": 0.11074742721907231,
"learning_rate": 0.0001824175824175824,
"loss": 0.3759,
"step": 83
},
{
"epoch": 0.27677100494233936,
"grad_norm": 0.10332829028464724,
"learning_rate": 0.00018461538461538463,
"loss": 0.4013,
"step": 84
},
{
"epoch": 0.2800658978583196,
"grad_norm": 0.12121573096651414,
"learning_rate": 0.00018681318681318683,
"loss": 0.3718,
"step": 85
},
{
"epoch": 0.2833607907742998,
"grad_norm": 0.11029471534257396,
"learning_rate": 0.00018901098901098903,
"loss": 0.3957,
"step": 86
},
{
"epoch": 0.28665568369028005,
"grad_norm": 0.10611352834090172,
"learning_rate": 0.00019120879120879122,
"loss": 0.3775,
"step": 87
},
{
"epoch": 0.2899505766062603,
"grad_norm": 0.11444959045215879,
"learning_rate": 0.00019340659340659342,
"loss": 0.3911,
"step": 88
},
{
"epoch": 0.2932454695222405,
"grad_norm": 0.11698929258412147,
"learning_rate": 0.00019560439560439562,
"loss": 0.3801,
"step": 89
},
{
"epoch": 0.29654036243822074,
"grad_norm": 0.11887085945257751,
"learning_rate": 0.0001978021978021978,
"loss": 0.3824,
"step": 90
},
{
"epoch": 0.29983525535420097,
"grad_norm": 0.12394486608854069,
"learning_rate": 0.0002,
"loss": 0.3713,
"step": 91
},
{
"epoch": 0.3031301482701812,
"grad_norm": 0.11524526961416616,
"learning_rate": 0.00019999926249900844,
"loss": 0.375,
"step": 92
},
{
"epoch": 0.30642504118616143,
"grad_norm": 0.10954878706910288,
"learning_rate": 0.00019999705000691188,
"loss": 0.373,
"step": 93
},
{
"epoch": 0.30971993410214166,
"grad_norm": 0.11180610022301836,
"learning_rate": 0.00019999336255634465,
"loss": 0.3767,
"step": 94
},
{
"epoch": 0.3130148270181219,
"grad_norm": 0.10423320337270955,
"learning_rate": 0.00019998820020169668,
"loss": 0.3726,
"step": 95
},
{
"epoch": 0.3163097199341021,
"grad_norm": 0.10584925582232764,
"learning_rate": 0.00019998156301911284,
"loss": 0.3955,
"step": 96
},
{
"epoch": 0.31960461285008235,
"grad_norm": 0.10917585284115876,
"learning_rate": 0.00019997345110649167,
"loss": 0.38,
"step": 97
},
{
"epoch": 0.3228995057660626,
"grad_norm": 0.11250538116569028,
"learning_rate": 0.0001999638645834841,
"loss": 0.3659,
"step": 98
},
{
"epoch": 0.3261943986820428,
"grad_norm": 0.10144424444540087,
"learning_rate": 0.00019995280359149149,
"loss": 0.3724,
"step": 99
},
{
"epoch": 0.32948929159802304,
"grad_norm": 0.11312360619640419,
"learning_rate": 0.0001999402682936637,
"loss": 0.3675,
"step": 100
},
{
"epoch": 0.33278418451400327,
"grad_norm": 0.11072192514968879,
"learning_rate": 0.0001999262588748966,
"loss": 0.3734,
"step": 101
},
{
"epoch": 0.33607907742998355,
"grad_norm": 0.10852569603049844,
"learning_rate": 0.00019991077554182943,
"loss": 0.3729,
"step": 102
},
{
"epoch": 0.3393739703459638,
"grad_norm": 0.10138437779435457,
"learning_rate": 0.00019989381852284166,
"loss": 0.368,
"step": 103
},
{
"epoch": 0.342668863261944,
"grad_norm": 0.11109618855106276,
"learning_rate": 0.00019987538806804964,
"loss": 0.3728,
"step": 104
},
{
"epoch": 0.34596375617792424,
"grad_norm": 0.10566572499718903,
"learning_rate": 0.00019985548444930294,
"loss": 0.3625,
"step": 105
},
{
"epoch": 0.34925864909390447,
"grad_norm": 0.10329823166968895,
"learning_rate": 0.00019983410796018032,
"loss": 0.3736,
"step": 106
},
{
"epoch": 0.3525535420098847,
"grad_norm": 0.10526340058049245,
"learning_rate": 0.00019981125891598546,
"loss": 0.3776,
"step": 107
},
{
"epoch": 0.35584843492586493,
"grad_norm": 0.10634239695176183,
"learning_rate": 0.00019978693765374217,
"loss": 0.3627,
"step": 108
},
{
"epoch": 0.35914332784184516,
"grad_norm": 0.10473623073599642,
"learning_rate": 0.0001997611445321896,
"loss": 0.3651,
"step": 109
},
{
"epoch": 0.3624382207578254,
"grad_norm": 0.1038114878244784,
"learning_rate": 0.00019973387993177673,
"loss": 0.3795,
"step": 110
},
{
"epoch": 0.3657331136738056,
"grad_norm": 0.097631843964531,
"learning_rate": 0.00019970514425465705,
"loss": 0.3758,
"step": 111
},
{
"epoch": 0.36902800658978585,
"grad_norm": 0.105170092111551,
"learning_rate": 0.00019967493792468228,
"loss": 0.3689,
"step": 112
},
{
"epoch": 0.3723228995057661,
"grad_norm": 0.10400530821586597,
"learning_rate": 0.00019964326138739645,
"loss": 0.3772,
"step": 113
},
{
"epoch": 0.3756177924217463,
"grad_norm": 0.09711231346227854,
"learning_rate": 0.0001996101151100291,
"loss": 0.3568,
"step": 114
},
{
"epoch": 0.37891268533772654,
"grad_norm": 0.10219375076601767,
"learning_rate": 0.00019957549958148841,
"loss": 0.3733,
"step": 115
},
{
"epoch": 0.38220757825370677,
"grad_norm": 0.10620115483842468,
"learning_rate": 0.00019953941531235424,
"loss": 0.3813,
"step": 116
},
{
"epoch": 0.385502471169687,
"grad_norm": 0.09890990719776012,
"learning_rate": 0.0001995018628348702,
"loss": 0.3588,
"step": 117
},
{
"epoch": 0.38879736408566723,
"grad_norm": 0.10332397343960774,
"learning_rate": 0.00019946284270293605,
"loss": 0.3745,
"step": 118
},
{
"epoch": 0.39209225700164746,
"grad_norm": 0.0962153448347187,
"learning_rate": 0.00019942235549209953,
"loss": 0.3691,
"step": 119
},
{
"epoch": 0.3953871499176277,
"grad_norm": 0.10293197017899637,
"learning_rate": 0.00019938040179954785,
"loss": 0.3629,
"step": 120
},
{
"epoch": 0.3986820428336079,
"grad_norm": 0.10120783240834415,
"learning_rate": 0.00019933698224409876,
"loss": 0.3583,
"step": 121
},
{
"epoch": 0.40197693574958815,
"grad_norm": 0.09823996102922933,
"learning_rate": 0.0001992920974661916,
"loss": 0.3551,
"step": 122
},
{
"epoch": 0.4052718286655684,
"grad_norm": 0.11608055320274326,
"learning_rate": 0.00019924574812787768,
"loss": 0.3776,
"step": 123
},
{
"epoch": 0.4085667215815486,
"grad_norm": 0.1015390883033684,
"learning_rate": 0.00019919793491281069,
"loss": 0.3658,
"step": 124
},
{
"epoch": 0.41186161449752884,
"grad_norm": 0.11305236549565593,
"learning_rate": 0.0001991486585262365,
"loss": 0.3698,
"step": 125
},
{
"epoch": 0.41515650741350907,
"grad_norm": 0.09858293841585426,
"learning_rate": 0.0001990979196949828,
"loss": 0.3655,
"step": 126
},
{
"epoch": 0.4184514003294893,
"grad_norm": 0.09867163357687168,
"learning_rate": 0.00019904571916744837,
"loss": 0.3589,
"step": 127
},
{
"epoch": 0.42174629324546953,
"grad_norm": 0.099878248246959,
"learning_rate": 0.00019899205771359198,
"loss": 0.3694,
"step": 128
},
{
"epoch": 0.42504118616144976,
"grad_norm": 0.09933371254810067,
"learning_rate": 0.00019893693612492116,
"loss": 0.3698,
"step": 129
},
{
"epoch": 0.42833607907743,
"grad_norm": 0.09374415766405315,
"learning_rate": 0.00019888035521448044,
"loss": 0.3618,
"step": 130
},
{
"epoch": 0.4316309719934102,
"grad_norm": 0.10862350391810786,
"learning_rate": 0.00019882231581683937,
"loss": 0.3597,
"step": 131
},
{
"epoch": 0.43492586490939045,
"grad_norm": 0.09819927590552285,
"learning_rate": 0.0001987628187880802,
"loss": 0.3592,
"step": 132
},
{
"epoch": 0.4382207578253707,
"grad_norm": 0.09799469540398538,
"learning_rate": 0.0001987018650057853,
"loss": 0.3548,
"step": 133
},
{
"epoch": 0.4415156507413509,
"grad_norm": 0.10446031673946489,
"learning_rate": 0.0001986394553690242,
"loss": 0.3739,
"step": 134
},
{
"epoch": 0.44481054365733114,
"grad_norm": 0.09368559137183911,
"learning_rate": 0.00019857559079834022,
"loss": 0.3646,
"step": 135
},
{
"epoch": 0.44810543657331137,
"grad_norm": 0.09651140610849232,
"learning_rate": 0.0001985102722357371,
"loss": 0.3546,
"step": 136
},
{
"epoch": 0.4514003294892916,
"grad_norm": 0.10543606439531415,
"learning_rate": 0.00019844350064466486,
"loss": 0.3644,
"step": 137
},
{
"epoch": 0.4546952224052718,
"grad_norm": 0.0949282387849649,
"learning_rate": 0.00019837527701000587,
"loss": 0.3604,
"step": 138
},
{
"epoch": 0.45799011532125206,
"grad_norm": 0.09906601410462178,
"learning_rate": 0.00019830560233806006,
"loss": 0.3648,
"step": 139
},
{
"epoch": 0.4612850082372323,
"grad_norm": 0.1027605621404487,
"learning_rate": 0.00019823447765653023,
"loss": 0.3482,
"step": 140
},
{
"epoch": 0.4645799011532125,
"grad_norm": 0.09699652288591568,
"learning_rate": 0.0001981619040145068,
"loss": 0.3545,
"step": 141
},
{
"epoch": 0.46787479406919275,
"grad_norm": 0.10545828026299639,
"learning_rate": 0.00019808788248245249,
"loss": 0.3719,
"step": 142
},
{
"epoch": 0.471169686985173,
"grad_norm": 0.0969238306005527,
"learning_rate": 0.00019801241415218636,
"loss": 0.356,
"step": 143
},
{
"epoch": 0.4744645799011532,
"grad_norm": 0.09451741240756485,
"learning_rate": 0.00019793550013686773,
"loss": 0.3463,
"step": 144
},
{
"epoch": 0.47775947281713343,
"grad_norm": 0.09646227177006125,
"learning_rate": 0.0001978571415709799,
"loss": 0.3644,
"step": 145
},
{
"epoch": 0.48105436573311366,
"grad_norm": 0.09993416821340104,
"learning_rate": 0.00019777733961031326,
"loss": 0.3498,
"step": 146
},
{
"epoch": 0.4843492586490939,
"grad_norm": 0.09746839978917406,
"learning_rate": 0.00019769609543194826,
"loss": 0.3428,
"step": 147
},
{
"epoch": 0.4876441515650741,
"grad_norm": 0.10241864087501074,
"learning_rate": 0.00019761341023423826,
"loss": 0.3677,
"step": 148
},
{
"epoch": 0.49093904448105435,
"grad_norm": 0.0930243618471461,
"learning_rate": 0.00019752928523679143,
"loss": 0.357,
"step": 149
},
{
"epoch": 0.4942339373970346,
"grad_norm": 0.09542173403956193,
"learning_rate": 0.00019744372168045324,
"loss": 0.3623,
"step": 150
},
{
"epoch": 0.4975288303130148,
"grad_norm": 0.092951368800601,
"learning_rate": 0.00019735672082728782,
"loss": 0.3502,
"step": 151
},
{
"epoch": 0.500823723228995,
"grad_norm": 0.09468069312701037,
"learning_rate": 0.00019726828396055948,
"loss": 0.3422,
"step": 152
},
{
"epoch": 0.5041186161449753,
"grad_norm": 0.09372935130232003,
"learning_rate": 0.00019717841238471375,
"loss": 0.3533,
"step": 153
},
{
"epoch": 0.5074135090609555,
"grad_norm": 0.09623089008115779,
"learning_rate": 0.00019708710742535814,
"loss": 0.3521,
"step": 154
},
{
"epoch": 0.5107084019769358,
"grad_norm": 0.09433147739759222,
"learning_rate": 0.00019699437042924265,
"loss": 0.376,
"step": 155
},
{
"epoch": 0.514003294892916,
"grad_norm": 0.08740337875902095,
"learning_rate": 0.0001969002027642398,
"loss": 0.3419,
"step": 156
},
{
"epoch": 0.5172981878088962,
"grad_norm": 0.09306098636137253,
"learning_rate": 0.00019680460581932447,
"loss": 0.3578,
"step": 157
},
{
"epoch": 0.5205930807248764,
"grad_norm": 0.0901779549976545,
"learning_rate": 0.00019670758100455356,
"loss": 0.3597,
"step": 158
},
{
"epoch": 0.5238879736408567,
"grad_norm": 0.09418423029537448,
"learning_rate": 0.000196609129751045,
"loss": 0.3608,
"step": 159
},
{
"epoch": 0.5271828665568369,
"grad_norm": 0.09028368053330069,
"learning_rate": 0.0001965092535109567,
"loss": 0.3592,
"step": 160
},
{
"epoch": 0.5304777594728172,
"grad_norm": 0.0935932711587901,
"learning_rate": 0.00019640795375746518,
"loss": 0.3619,
"step": 161
},
{
"epoch": 0.5337726523887973,
"grad_norm": 0.09991159935467303,
"learning_rate": 0.00019630523198474386,
"loss": 0.3493,
"step": 162
},
{
"epoch": 0.5370675453047776,
"grad_norm": 0.09135500404430619,
"learning_rate": 0.00019620108970794088,
"loss": 0.3504,
"step": 163
},
{
"epoch": 0.5403624382207578,
"grad_norm": 0.09436717385152184,
"learning_rate": 0.00019609552846315694,
"loss": 0.3489,
"step": 164
},
{
"epoch": 0.5436573311367381,
"grad_norm": 0.10184053426464197,
"learning_rate": 0.0001959885498074224,
"loss": 0.3443,
"step": 165
},
{
"epoch": 0.5469522240527183,
"grad_norm": 0.09470012639193129,
"learning_rate": 0.00019588015531867464,
"loss": 0.3611,
"step": 166
},
{
"epoch": 0.5502471169686985,
"grad_norm": 0.0909557452721919,
"learning_rate": 0.0001957703465957345,
"loss": 0.3663,
"step": 167
},
{
"epoch": 0.5535420098846787,
"grad_norm": 0.09051693863828568,
"learning_rate": 0.0001956591252582828,
"loss": 0.3539,
"step": 168
},
{
"epoch": 0.556836902800659,
"grad_norm": 0.09596952767327611,
"learning_rate": 0.0001955464929468365,
"loss": 0.3432,
"step": 169
},
{
"epoch": 0.5601317957166392,
"grad_norm": 0.0908006040293162,
"learning_rate": 0.00019543245132272441,
"loss": 0.3518,
"step": 170
},
{
"epoch": 0.5634266886326195,
"grad_norm": 0.09261485040615038,
"learning_rate": 0.00019531700206806274,
"loss": 0.346,
"step": 171
},
{
"epoch": 0.5667215815485996,
"grad_norm": 0.08952858341904249,
"learning_rate": 0.0001952001468857303,
"loss": 0.341,
"step": 172
},
{
"epoch": 0.5700164744645799,
"grad_norm": 0.0912632841864883,
"learning_rate": 0.00019508188749934333,
"loss": 0.3369,
"step": 173
},
{
"epoch": 0.5733113673805601,
"grad_norm": 0.09208815220485178,
"learning_rate": 0.00019496222565323015,
"loss": 0.356,
"step": 174
},
{
"epoch": 0.5766062602965404,
"grad_norm": 0.08856220347094375,
"learning_rate": 0.00019484116311240532,
"loss": 0.3593,
"step": 175
},
{
"epoch": 0.5799011532125206,
"grad_norm": 0.09046349122843272,
"learning_rate": 0.00019471870166254377,
"loss": 0.3469,
"step": 176
},
{
"epoch": 0.5831960461285008,
"grad_norm": 0.09233531995170684,
"learning_rate": 0.0001945948431099543,
"loss": 0.3541,
"step": 177
},
{
"epoch": 0.586490939044481,
"grad_norm": 0.090106620935963,
"learning_rate": 0.00019446958928155298,
"loss": 0.3603,
"step": 178
},
{
"epoch": 0.5897858319604613,
"grad_norm": 0.0875097489689974,
"learning_rate": 0.00019434294202483633,
"loss": 0.359,
"step": 179
},
{
"epoch": 0.5930807248764415,
"grad_norm": 0.09123195997573252,
"learning_rate": 0.00019421490320785384,
"loss": 0.3565,
"step": 180
},
{
"epoch": 0.5963756177924218,
"grad_norm": 0.09114904646800603,
"learning_rate": 0.00019408547471918061,
"loss": 0.35,
"step": 181
},
{
"epoch": 0.5996705107084019,
"grad_norm": 0.08497423438629802,
"learning_rate": 0.00019395465846788946,
"loss": 0.3476,
"step": 182
},
{
"epoch": 0.6029654036243822,
"grad_norm": 0.09604855511067968,
"learning_rate": 0.00019382245638352262,
"loss": 0.3501,
"step": 183
},
{
"epoch": 0.6062602965403624,
"grad_norm": 0.0869157197559244,
"learning_rate": 0.0001936888704160635,
"loss": 0.3526,
"step": 184
},
{
"epoch": 0.6095551894563427,
"grad_norm": 0.08477176252928098,
"learning_rate": 0.00019355390253590775,
"loss": 0.3541,
"step": 185
},
{
"epoch": 0.6128500823723229,
"grad_norm": 0.089378043032419,
"learning_rate": 0.00019341755473383432,
"loss": 0.3558,
"step": 186
},
{
"epoch": 0.6161449752883031,
"grad_norm": 0.0844453554389038,
"learning_rate": 0.00019327982902097595,
"loss": 0.3505,
"step": 187
},
{
"epoch": 0.6194398682042833,
"grad_norm": 0.09223714783810881,
"learning_rate": 0.00019314072742878963,
"loss": 0.3467,
"step": 188
},
{
"epoch": 0.6227347611202636,
"grad_norm": 0.09154045104035775,
"learning_rate": 0.00019300025200902666,
"loss": 0.3485,
"step": 189
},
{
"epoch": 0.6260296540362438,
"grad_norm": 0.08978112849737604,
"learning_rate": 0.0001928584048337022,
"loss": 0.3535,
"step": 190
},
{
"epoch": 0.6293245469522241,
"grad_norm": 0.09219618310798985,
"learning_rate": 0.00019271518799506492,
"loss": 0.3492,
"step": 191
},
{
"epoch": 0.6326194398682042,
"grad_norm": 0.0878840125521925,
"learning_rate": 0.00019257060360556606,
"loss": 0.3517,
"step": 192
},
{
"epoch": 0.6359143327841845,
"grad_norm": 0.09421976864427346,
"learning_rate": 0.00019242465379782823,
"loss": 0.3592,
"step": 193
},
{
"epoch": 0.6392092257001647,
"grad_norm": 0.09096356033322509,
"learning_rate": 0.00019227734072461392,
"loss": 0.3576,
"step": 194
},
{
"epoch": 0.642504118616145,
"grad_norm": 0.08546178334325298,
"learning_rate": 0.00019212866655879396,
"loss": 0.3443,
"step": 195
},
{
"epoch": 0.6457990115321252,
"grad_norm": 0.08815077311380441,
"learning_rate": 0.00019197863349331522,
"loss": 0.35,
"step": 196
},
{
"epoch": 0.6490939044481054,
"grad_norm": 0.0909243070087766,
"learning_rate": 0.00019182724374116838,
"loss": 0.3477,
"step": 197
},
{
"epoch": 0.6523887973640856,
"grad_norm": 0.08716295830994963,
"learning_rate": 0.0001916744995353553,
"loss": 0.3425,
"step": 198
},
{
"epoch": 0.6556836902800659,
"grad_norm": 0.08437105411786357,
"learning_rate": 0.00019152040312885604,
"loss": 0.3473,
"step": 199
},
{
"epoch": 0.6589785831960461,
"grad_norm": 0.08999850132612741,
"learning_rate": 0.00019136495679459564,
"loss": 0.3446,
"step": 200
},
{
"epoch": 0.6622734761120264,
"grad_norm": 0.09295157213742279,
"learning_rate": 0.00019120816282541063,
"loss": 0.3509,
"step": 201
},
{
"epoch": 0.6655683690280065,
"grad_norm": 0.09096038036728704,
"learning_rate": 0.00019105002353401516,
"loss": 0.3462,
"step": 202
},
{
"epoch": 0.6688632619439868,
"grad_norm": 0.09051478487792422,
"learning_rate": 0.0001908905412529669,
"loss": 0.3448,
"step": 203
},
{
"epoch": 0.6721581548599671,
"grad_norm": 0.0932355722348614,
"learning_rate": 0.00019072971833463269,
"loss": 0.3519,
"step": 204
},
{
"epoch": 0.6754530477759473,
"grad_norm": 0.09198493368242532,
"learning_rate": 0.00019056755715115374,
"loss": 0.3474,
"step": 205
},
{
"epoch": 0.6787479406919276,
"grad_norm": 0.09105873669312843,
"learning_rate": 0.00019040406009441073,
"loss": 0.3446,
"step": 206
},
{
"epoch": 0.6820428336079077,
"grad_norm": 0.09474895757250051,
"learning_rate": 0.00019023922957598846,
"loss": 0.3549,
"step": 207
},
{
"epoch": 0.685337726523888,
"grad_norm": 0.09113820085850777,
"learning_rate": 0.0001900730680271404,
"loss": 0.3517,
"step": 208
},
{
"epoch": 0.6886326194398682,
"grad_norm": 0.08952952284278057,
"learning_rate": 0.00018990557789875265,
"loss": 0.3446,
"step": 209
},
{
"epoch": 0.6919275123558485,
"grad_norm": 0.08504291347151309,
"learning_rate": 0.00018973676166130795,
"loss": 0.3499,
"step": 210
},
{
"epoch": 0.6952224052718287,
"grad_norm": 0.08682346989926243,
"learning_rate": 0.00018956662180484913,
"loss": 0.3461,
"step": 211
},
{
"epoch": 0.6985172981878089,
"grad_norm": 0.09135947097159414,
"learning_rate": 0.00018939516083894248,
"loss": 0.3507,
"step": 212
},
{
"epoch": 0.7018121911037891,
"grad_norm": 0.08817857221763035,
"learning_rate": 0.0001892223812926406,
"loss": 0.34,
"step": 213
},
{
"epoch": 0.7051070840197694,
"grad_norm": 0.08660281169520562,
"learning_rate": 0.00018904828571444525,
"loss": 0.3467,
"step": 214
},
{
"epoch": 0.7084019769357496,
"grad_norm": 0.08873620967277855,
"learning_rate": 0.00018887287667226964,
"loss": 0.3359,
"step": 215
},
{
"epoch": 0.7116968698517299,
"grad_norm": 0.09198572626736658,
"learning_rate": 0.00018869615675340068,
"loss": 0.3419,
"step": 216
},
{
"epoch": 0.71499176276771,
"grad_norm": 0.08907612433680563,
"learning_rate": 0.0001885181285644606,
"loss": 0.3587,
"step": 217
},
{
"epoch": 0.7182866556836903,
"grad_norm": 0.08850895851919431,
"learning_rate": 0.00018833879473136877,
"loss": 0.3477,
"step": 218
},
{
"epoch": 0.7215815485996705,
"grad_norm": 0.0846389728816628,
"learning_rate": 0.00018815815789930275,
"loss": 0.3366,
"step": 219
},
{
"epoch": 0.7248764415156508,
"grad_norm": 0.09005118866716647,
"learning_rate": 0.00018797622073265946,
"loss": 0.3602,
"step": 220
},
{
"epoch": 0.728171334431631,
"grad_norm": 0.08535147302632334,
"learning_rate": 0.00018779298591501564,
"loss": 0.3422,
"step": 221
},
{
"epoch": 0.7314662273476112,
"grad_norm": 0.0854751704724938,
"learning_rate": 0.0001876084561490885,
"loss": 0.3426,
"step": 222
},
{
"epoch": 0.7347611202635914,
"grad_norm": 0.08664582706109708,
"learning_rate": 0.00018742263415669582,
"loss": 0.3532,
"step": 223
},
{
"epoch": 0.7380560131795717,
"grad_norm": 0.08458101578511544,
"learning_rate": 0.00018723552267871555,
"loss": 0.3438,
"step": 224
},
{
"epoch": 0.7413509060955519,
"grad_norm": 0.08431293605775432,
"learning_rate": 0.0001870471244750458,
"loss": 0.3502,
"step": 225
},
{
"epoch": 0.7446457990115322,
"grad_norm": 0.08369525987098254,
"learning_rate": 0.00018685744232456374,
"loss": 0.3333,
"step": 226
},
{
"epoch": 0.7479406919275123,
"grad_norm": 0.08784634286641266,
"learning_rate": 0.00018666647902508494,
"loss": 0.3401,
"step": 227
},
{
"epoch": 0.7512355848434926,
"grad_norm": 0.08421464642591442,
"learning_rate": 0.00018647423739332175,
"loss": 0.3275,
"step": 228
},
{
"epoch": 0.7545304777594728,
"grad_norm": 0.08154049872789887,
"learning_rate": 0.00018628072026484214,
"loss": 0.3333,
"step": 229
},
{
"epoch": 0.7578253706754531,
"grad_norm": 0.08114999530616501,
"learning_rate": 0.00018608593049402754,
"loss": 0.3434,
"step": 230
},
{
"epoch": 0.7611202635914333,
"grad_norm": 0.08374578563091178,
"learning_rate": 0.0001858898709540309,
"loss": 0.3449,
"step": 231
},
{
"epoch": 0.7644151565074135,
"grad_norm": 0.07916589630276719,
"learning_rate": 0.00018569254453673444,
"loss": 0.3268,
"step": 232
},
{
"epoch": 0.7677100494233937,
"grad_norm": 0.0867833099769683,
"learning_rate": 0.00018549395415270664,
"loss": 0.3292,
"step": 233
},
{
"epoch": 0.771004942339374,
"grad_norm": 0.08554090940119152,
"learning_rate": 0.00018529410273115962,
"loss": 0.3476,
"step": 234
},
{
"epoch": 0.7742998352553542,
"grad_norm": 0.08576144914828523,
"learning_rate": 0.0001850929932199058,
"loss": 0.3454,
"step": 235
},
{
"epoch": 0.7775947281713345,
"grad_norm": 0.09025913774937107,
"learning_rate": 0.0001848906285853145,
"loss": 0.347,
"step": 236
},
{
"epoch": 0.7808896210873146,
"grad_norm": 0.08519936716335193,
"learning_rate": 0.00018468701181226804,
"loss": 0.3397,
"step": 237
},
{
"epoch": 0.7841845140032949,
"grad_norm": 0.08499222661359229,
"learning_rate": 0.0001844821459041179,
"loss": 0.3458,
"step": 238
},
{
"epoch": 0.7874794069192751,
"grad_norm": 0.08609729000174243,
"learning_rate": 0.00018427603388264025,
"loss": 0.3332,
"step": 239
},
{
"epoch": 0.7907742998352554,
"grad_norm": 0.09181680148202499,
"learning_rate": 0.00018406867878799154,
"loss": 0.3499,
"step": 240
},
{
"epoch": 0.7940691927512356,
"grad_norm": 0.08755042199814178,
"learning_rate": 0.0001838600836786635,
"loss": 0.3442,
"step": 241
},
{
"epoch": 0.7973640856672158,
"grad_norm": 0.08357114463901723,
"learning_rate": 0.00018365025163143814,
"loss": 0.3496,
"step": 242
},
{
"epoch": 0.800658978583196,
"grad_norm": 0.08851618055895055,
"learning_rate": 0.0001834391857413423,
"loss": 0.3513,
"step": 243
},
{
"epoch": 0.8039538714991763,
"grad_norm": 0.09323076567865188,
"learning_rate": 0.0001832268891216021,
"loss": 0.3611,
"step": 244
},
{
"epoch": 0.8072487644151565,
"grad_norm": 0.0835634009338884,
"learning_rate": 0.0001830133649035968,
"loss": 0.3387,
"step": 245
},
{
"epoch": 0.8105436573311368,
"grad_norm": 0.08610843703599567,
"learning_rate": 0.0001827986162368129,
"loss": 0.3465,
"step": 246
},
{
"epoch": 0.8138385502471169,
"grad_norm": 0.08371169841943385,
"learning_rate": 0.00018258264628879752,
"loss": 0.3535,
"step": 247
},
{
"epoch": 0.8171334431630972,
"grad_norm": 0.08370848845452357,
"learning_rate": 0.00018236545824511168,
"loss": 0.3267,
"step": 248
},
{
"epoch": 0.8204283360790774,
"grad_norm": 0.09272034869991407,
"learning_rate": 0.0001821470553092832,
"loss": 0.3484,
"step": 249
},
{
"epoch": 0.8237232289950577,
"grad_norm": 0.08921657004712517,
"learning_rate": 0.0001819274407027599,
"loss": 0.3373,
"step": 250
},
{
"epoch": 0.8270181219110379,
"grad_norm": 0.08576896779949927,
"learning_rate": 0.0001817066176648615,
"loss": 0.3533,
"step": 251
},
{
"epoch": 0.8303130148270181,
"grad_norm": 0.08520255452310015,
"learning_rate": 0.00018148458945273213,
"loss": 0.3542,
"step": 252
},
{
"epoch": 0.8336079077429983,
"grad_norm": 0.08556329735314366,
"learning_rate": 0.0001812613593412924,
"loss": 0.3312,
"step": 253
},
{
"epoch": 0.8369028006589786,
"grad_norm": 0.08644232163750781,
"learning_rate": 0.0001810369306231909,
"loss": 0.3454,
"step": 254
},
{
"epoch": 0.8401976935749588,
"grad_norm": 0.08165688948658312,
"learning_rate": 0.00018081130660875557,
"loss": 0.3444,
"step": 255
},
{
"epoch": 0.8434925864909391,
"grad_norm": 0.08092566240118927,
"learning_rate": 0.0001805844906259452,
"loss": 0.341,
"step": 256
},
{
"epoch": 0.8467874794069192,
"grad_norm": 0.08175414891463337,
"learning_rate": 0.00018035648602029997,
"loss": 0.3474,
"step": 257
},
{
"epoch": 0.8500823723228995,
"grad_norm": 0.0852653315237305,
"learning_rate": 0.00018012729615489236,
"loss": 0.3445,
"step": 258
},
{
"epoch": 0.8533772652388797,
"grad_norm": 0.0839021217620115,
"learning_rate": 0.00017989692441027744,
"loss": 0.3478,
"step": 259
},
{
"epoch": 0.85667215815486,
"grad_norm": 0.08924662802136864,
"learning_rate": 0.000179665374184443,
"loss": 0.3392,
"step": 260
},
{
"epoch": 0.8599670510708401,
"grad_norm": 0.08424677502777078,
"learning_rate": 0.00017943264889275944,
"loss": 0.3451,
"step": 261
},
{
"epoch": 0.8632619439868204,
"grad_norm": 0.08560296301440222,
"learning_rate": 0.00017919875196792948,
"loss": 0.3438,
"step": 262
},
{
"epoch": 0.8665568369028006,
"grad_norm": 0.08330737239164673,
"learning_rate": 0.00017896368685993736,
"loss": 0.3499,
"step": 263
},
{
"epoch": 0.8698517298187809,
"grad_norm": 0.08043192435705314,
"learning_rate": 0.00017872745703599808,
"loss": 0.3326,
"step": 264
},
{
"epoch": 0.8731466227347611,
"grad_norm": 0.08601789750278353,
"learning_rate": 0.00017849006598050625,
"loss": 0.3377,
"step": 265
},
{
"epoch": 0.8764415156507414,
"grad_norm": 0.084248493499374,
"learning_rate": 0.00017825151719498466,
"loss": 0.3394,
"step": 266
},
{
"epoch": 0.8797364085667215,
"grad_norm": 0.08413208325626391,
"learning_rate": 0.00017801181419803256,
"loss": 0.3463,
"step": 267
},
{
"epoch": 0.8830313014827018,
"grad_norm": 0.08490917843632152,
"learning_rate": 0.00017777096052527398,
"loss": 0.3395,
"step": 268
},
{
"epoch": 0.886326194398682,
"grad_norm": 0.08529218362151314,
"learning_rate": 0.00017752895972930537,
"loss": 0.3432,
"step": 269
},
{
"epoch": 0.8896210873146623,
"grad_norm": 0.08406202028031955,
"learning_rate": 0.0001772858153796432,
"loss": 0.3382,
"step": 270
},
{
"epoch": 0.8929159802306426,
"grad_norm": 0.08192359095153046,
"learning_rate": 0.0001770415310626715,
"loss": 0.3438,
"step": 271
},
{
"epoch": 0.8962108731466227,
"grad_norm": 0.08667337369960532,
"learning_rate": 0.0001767961103815888,
"loss": 0.3467,
"step": 272
},
{
"epoch": 0.899505766062603,
"grad_norm": 0.09030944746983277,
"learning_rate": 0.00017654955695635497,
"loss": 0.3353,
"step": 273
},
{
"epoch": 0.9028006589785832,
"grad_norm": 0.08866625882756005,
"learning_rate": 0.00017630187442363798,
"loss": 0.3436,
"step": 274
},
{
"epoch": 0.9060955518945635,
"grad_norm": 0.08505719283151414,
"learning_rate": 0.00017605306643676008,
"loss": 0.3391,
"step": 275
},
{
"epoch": 0.9093904448105437,
"grad_norm": 0.08322038992732612,
"learning_rate": 0.00017580313666564395,
"loss": 0.3393,
"step": 276
},
{
"epoch": 0.9126853377265239,
"grad_norm": 0.0866505683085061,
"learning_rate": 0.00017555208879675875,
"loss": 0.3409,
"step": 277
},
{
"epoch": 0.9159802306425041,
"grad_norm": 0.08205474422496563,
"learning_rate": 0.00017529992653306548,
"loss": 0.3429,
"step": 278
},
{
"epoch": 0.9192751235584844,
"grad_norm": 0.08286735294573946,
"learning_rate": 0.00017504665359396255,
"loss": 0.3457,
"step": 279
},
{
"epoch": 0.9225700164744646,
"grad_norm": 0.0844618640503933,
"learning_rate": 0.00017479227371523082,
"loss": 0.3447,
"step": 280
},
{
"epoch": 0.9258649093904449,
"grad_norm": 0.07920730838964726,
"learning_rate": 0.0001745367906489786,
"loss": 0.3297,
"step": 281
},
{
"epoch": 0.929159802306425,
"grad_norm": 0.08334771190565178,
"learning_rate": 0.00017428020816358605,
"loss": 0.3474,
"step": 282
},
{
"epoch": 0.9324546952224053,
"grad_norm": 0.08786988637856916,
"learning_rate": 0.00017402253004365008,
"loss": 0.3302,
"step": 283
},
{
"epoch": 0.9357495881383855,
"grad_norm": 0.07938060869152685,
"learning_rate": 0.00017376376008992797,
"loss": 0.3215,
"step": 284
},
{
"epoch": 0.9390444810543658,
"grad_norm": 0.08542382095743929,
"learning_rate": 0.00017350390211928166,
"loss": 0.3391,
"step": 285
},
{
"epoch": 0.942339373970346,
"grad_norm": 0.08832893571241651,
"learning_rate": 0.00017324295996462146,
"loss": 0.3475,
"step": 286
},
{
"epoch": 0.9456342668863262,
"grad_norm": 0.08963369742867137,
"learning_rate": 0.00017298093747484923,
"loss": 0.3516,
"step": 287
},
{
"epoch": 0.9489291598023064,
"grad_norm": 0.08105453925768774,
"learning_rate": 0.00017271783851480194,
"loss": 0.3387,
"step": 288
},
{
"epoch": 0.9522240527182867,
"grad_norm": 0.08806421113907988,
"learning_rate": 0.00017245366696519448,
"loss": 0.3384,
"step": 289
},
{
"epoch": 0.9555189456342669,
"grad_norm": 0.08613965774858882,
"learning_rate": 0.0001721884267225624,
"loss": 0.3402,
"step": 290
},
{
"epoch": 0.9588138385502472,
"grad_norm": 0.08154607108470488,
"learning_rate": 0.00017192212169920459,
"loss": 0.3379,
"step": 291
},
{
"epoch": 0.9621087314662273,
"grad_norm": 0.08288246340352515,
"learning_rate": 0.00017165475582312537,
"loss": 0.3375,
"step": 292
},
{
"epoch": 0.9654036243822076,
"grad_norm": 0.09926330786301062,
"learning_rate": 0.00017138633303797674,
"loss": 0.346,
"step": 293
},
{
"epoch": 0.9686985172981878,
"grad_norm": 0.08416129865679868,
"learning_rate": 0.00017111685730300015,
"loss": 0.3397,
"step": 294
},
{
"epoch": 0.9719934102141681,
"grad_norm": 0.08420994658478355,
"learning_rate": 0.00017084633259296797,
"loss": 0.344,
"step": 295
},
{
"epoch": 0.9752883031301482,
"grad_norm": 0.0798569849854742,
"learning_rate": 0.00017057476289812504,
"loss": 0.3346,
"step": 296
},
{
"epoch": 0.9785831960461285,
"grad_norm": 0.08247914082738622,
"learning_rate": 0.0001703021522241298,
"loss": 0.3327,
"step": 297
},
{
"epoch": 0.9818780889621087,
"grad_norm": 0.08351637496453113,
"learning_rate": 0.00017002850459199505,
"loss": 0.3465,
"step": 298
},
{
"epoch": 0.985172981878089,
"grad_norm": 0.08746419940698986,
"learning_rate": 0.00016975382403802878,
"loss": 0.3511,
"step": 299
},
{
"epoch": 0.9884678747940692,
"grad_norm": 0.08467784721413267,
"learning_rate": 0.00016947811461377467,
"loss": 0.3482,
"step": 300
},
{
"epoch": 0.9917627677100495,
"grad_norm": 0.07930487498006872,
"learning_rate": 0.00016920138038595216,
"loss": 0.3347,
"step": 301
},
{
"epoch": 0.9950576606260296,
"grad_norm": 0.08974209141023505,
"learning_rate": 0.00016892362543639654,
"loss": 0.3435,
"step": 302
},
{
"epoch": 0.9983525535420099,
"grad_norm": 0.08015781337535312,
"learning_rate": 0.00016864485386199892,
"loss": 0.337,
"step": 303
},
{
"epoch": 0.9983525535420099,
"eval_loss": 0.33799201250076294,
"eval_runtime": 167.3264,
"eval_samples_per_second": 30.509,
"eval_steps_per_second": 0.956,
"step": 303
},
{
"epoch": 1.00164744645799,
"grad_norm": 0.08187065929331228,
"learning_rate": 0.0001683650697746455,
"loss": 0.331,
"step": 304
},
{
"epoch": 1.0049423393739703,
"grad_norm": 0.08344895940789815,
"learning_rate": 0.00016808427730115715,
"loss": 0.3212,
"step": 305
},
{
"epoch": 1.0082372322899507,
"grad_norm": 0.0812051492859621,
"learning_rate": 0.0001678024805832284,
"loss": 0.321,
"step": 306
},
{
"epoch": 1.0115321252059308,
"grad_norm": 0.082485666654798,
"learning_rate": 0.00016751968377736639,
"loss": 0.3291,
"step": 307
},
{
"epoch": 1.014827018121911,
"grad_norm": 0.08852023762319512,
"learning_rate": 0.00016723589105482967,
"loss": 0.3243,
"step": 308
},
{
"epoch": 1.0181219110378912,
"grad_norm": 0.08541502987915972,
"learning_rate": 0.00016695110660156653,
"loss": 0.3179,
"step": 309
},
{
"epoch": 1.0214168039538716,
"grad_norm": 0.08162657768023696,
"learning_rate": 0.00016666533461815326,
"loss": 0.3128,
"step": 310
},
{
"epoch": 1.0247116968698518,
"grad_norm": 0.08866859604447454,
"learning_rate": 0.0001663785793197323,
"loss": 0.3263,
"step": 311
},
{
"epoch": 1.028006589785832,
"grad_norm": 0.08377699516728787,
"learning_rate": 0.00016609084493595,
"loss": 0.3172,
"step": 312
},
{
"epoch": 1.031301482701812,
"grad_norm": 0.08249601913350711,
"learning_rate": 0.00016580213571089426,
"loss": 0.3273,
"step": 313
},
{
"epoch": 1.0345963756177925,
"grad_norm": 0.08916625891991832,
"learning_rate": 0.00016551245590303178,
"loss": 0.3306,
"step": 314
},
{
"epoch": 1.0378912685337727,
"grad_norm": 0.08580016654634558,
"learning_rate": 0.00016522180978514555,
"loss": 0.322,
"step": 315
},
{
"epoch": 1.0411861614497528,
"grad_norm": 0.08571304387115758,
"learning_rate": 0.00016493020164427152,
"loss": 0.3275,
"step": 316
},
{
"epoch": 1.044481054365733,
"grad_norm": 0.08216233263790286,
"learning_rate": 0.00016463763578163562,
"loss": 0.3312,
"step": 317
},
{
"epoch": 1.0477759472817134,
"grad_norm": 0.0875991270266792,
"learning_rate": 0.00016434411651259007,
"loss": 0.3313,
"step": 318
},
{
"epoch": 1.0510708401976936,
"grad_norm": 0.08448691383529304,
"learning_rate": 0.00016404964816654993,
"loss": 0.3255,
"step": 319
},
{
"epoch": 1.0543657331136738,
"grad_norm": 0.0845703455173748,
"learning_rate": 0.00016375423508692912,
"loss": 0.3359,
"step": 320
},
{
"epoch": 1.057660626029654,
"grad_norm": 0.0882361267271713,
"learning_rate": 0.00016345788163107646,
"loss": 0.3341,
"step": 321
},
{
"epoch": 1.0609555189456343,
"grad_norm": 0.0849311944593189,
"learning_rate": 0.00016316059217021125,
"loss": 0.3239,
"step": 322
},
{
"epoch": 1.0642504118616145,
"grad_norm": 0.08152300437502101,
"learning_rate": 0.000162862371089359,
"loss": 0.3243,
"step": 323
},
{
"epoch": 1.0675453047775947,
"grad_norm": 0.08857601001972923,
"learning_rate": 0.0001625632227872865,
"loss": 0.333,
"step": 324
},
{
"epoch": 1.0708401976935749,
"grad_norm": 0.08801532835686154,
"learning_rate": 0.00016226315167643723,
"loss": 0.3322,
"step": 325
},
{
"epoch": 1.0741350906095553,
"grad_norm": 0.08525184904271058,
"learning_rate": 0.0001619621621828659,
"loss": 0.3167,
"step": 326
},
{
"epoch": 1.0774299835255354,
"grad_norm": 0.08135029197370526,
"learning_rate": 0.0001616602587461736,
"loss": 0.3269,
"step": 327
},
{
"epoch": 1.0807248764415156,
"grad_norm": 0.08424072997426707,
"learning_rate": 0.000161357445819442,
"loss": 0.3239,
"step": 328
},
{
"epoch": 1.084019769357496,
"grad_norm": 0.08258821495595987,
"learning_rate": 0.00016105372786916775,
"loss": 0.3183,
"step": 329
},
{
"epoch": 1.0873146622734762,
"grad_norm": 0.08498350432029378,
"learning_rate": 0.00016074910937519663,
"loss": 0.3328,
"step": 330
},
{
"epoch": 1.0906095551894563,
"grad_norm": 0.08667493093004443,
"learning_rate": 0.0001604435948306575,
"loss": 0.3298,
"step": 331
},
{
"epoch": 1.0939044481054365,
"grad_norm": 0.08869319837110255,
"learning_rate": 0.00016013718874189595,
"loss": 0.3336,
"step": 332
},
{
"epoch": 1.0971993410214167,
"grad_norm": 0.08379372154609564,
"learning_rate": 0.00015982989562840784,
"loss": 0.3206,
"step": 333
},
{
"epoch": 1.100494233937397,
"grad_norm": 0.08172530708530337,
"learning_rate": 0.0001595217200227727,
"loss": 0.3212,
"step": 334
},
{
"epoch": 1.1037891268533773,
"grad_norm": 0.08535886274333539,
"learning_rate": 0.0001592126664705868,
"loss": 0.3243,
"step": 335
},
{
"epoch": 1.1070840197693574,
"grad_norm": 0.08360595473099759,
"learning_rate": 0.0001589027395303962,
"loss": 0.3233,
"step": 336
},
{
"epoch": 1.1103789126853378,
"grad_norm": 0.08265998271604205,
"learning_rate": 0.0001585919437736294,
"loss": 0.3271,
"step": 337
},
{
"epoch": 1.113673805601318,
"grad_norm": 0.08237810191987908,
"learning_rate": 0.00015828028378452998,
"loss": 0.3238,
"step": 338
},
{
"epoch": 1.1169686985172982,
"grad_norm": 0.08730291854461844,
"learning_rate": 0.00015796776416008898,
"loss": 0.3195,
"step": 339
},
{
"epoch": 1.1202635914332784,
"grad_norm": 0.08634254849058057,
"learning_rate": 0.00015765438950997705,
"loss": 0.323,
"step": 340
},
{
"epoch": 1.1235584843492585,
"grad_norm": 0.08081122456257506,
"learning_rate": 0.0001573401644564764,
"loss": 0.3102,
"step": 341
},
{
"epoch": 1.126853377265239,
"grad_norm": 0.08841633690696776,
"learning_rate": 0.00015702509363441295,
"loss": 0.3345,
"step": 342
},
{
"epoch": 1.130148270181219,
"grad_norm": 0.08830337626554721,
"learning_rate": 0.00015670918169108752,
"loss": 0.3282,
"step": 343
},
{
"epoch": 1.1334431630971993,
"grad_norm": 0.08099355585017688,
"learning_rate": 0.00015639243328620744,
"loss": 0.3058,
"step": 344
},
{
"epoch": 1.1367380560131797,
"grad_norm": 0.08076571239439685,
"learning_rate": 0.00015607485309181813,
"loss": 0.3182,
"step": 345
},
{
"epoch": 1.1400329489291599,
"grad_norm": 0.0829246184070146,
"learning_rate": 0.00015575644579223362,
"loss": 0.323,
"step": 346
},
{
"epoch": 1.14332784184514,
"grad_norm": 0.08668721952043845,
"learning_rate": 0.00015543721608396796,
"loss": 0.3163,
"step": 347
},
{
"epoch": 1.1466227347611202,
"grad_norm": 0.08394538492198296,
"learning_rate": 0.0001551171686756657,
"loss": 0.3116,
"step": 348
},
{
"epoch": 1.1499176276771004,
"grad_norm": 0.08597909584931814,
"learning_rate": 0.00015479630828803235,
"loss": 0.3205,
"step": 349
},
{
"epoch": 1.1532125205930808,
"grad_norm": 0.0840149508071814,
"learning_rate": 0.0001544746396537651,
"loss": 0.3093,
"step": 350
},
{
"epoch": 1.156507413509061,
"grad_norm": 0.08658284232618915,
"learning_rate": 0.00015415216751748264,
"loss": 0.316,
"step": 351
},
{
"epoch": 1.1598023064250411,
"grad_norm": 0.08152435596529481,
"learning_rate": 0.0001538288966356554,
"loss": 0.3177,
"step": 352
},
{
"epoch": 1.1630971993410215,
"grad_norm": 0.08209820514345408,
"learning_rate": 0.00015350483177653526,
"loss": 0.3261,
"step": 353
},
{
"epoch": 1.1663920922570017,
"grad_norm": 0.08272819539416575,
"learning_rate": 0.00015317997772008537,
"loss": 0.3226,
"step": 354
},
{
"epoch": 1.1696869851729819,
"grad_norm": 0.08660002845038178,
"learning_rate": 0.00015285433925790945,
"loss": 0.3191,
"step": 355
},
{
"epoch": 1.172981878088962,
"grad_norm": 0.08632540443067051,
"learning_rate": 0.0001525279211931813,
"loss": 0.3263,
"step": 356
},
{
"epoch": 1.1762767710049424,
"grad_norm": 0.08565341191026239,
"learning_rate": 0.00015220072834057387,
"loss": 0.3192,
"step": 357
},
{
"epoch": 1.1795716639209226,
"grad_norm": 0.07902224269764019,
"learning_rate": 0.00015187276552618817,
"loss": 0.3052,
"step": 358
},
{
"epoch": 1.1828665568369028,
"grad_norm": 0.08704133599468743,
"learning_rate": 0.0001515440375874823,
"loss": 0.3129,
"step": 359
},
{
"epoch": 1.186161449752883,
"grad_norm": 0.08910719635327755,
"learning_rate": 0.00015121454937319976,
"loss": 0.3393,
"step": 360
},
{
"epoch": 1.1894563426688634,
"grad_norm": 0.08936796334171324,
"learning_rate": 0.00015088430574329836,
"loss": 0.3237,
"step": 361
},
{
"epoch": 1.1927512355848435,
"grad_norm": 0.08635637992605481,
"learning_rate": 0.0001505533115688781,
"loss": 0.3317,
"step": 362
},
{
"epoch": 1.1960461285008237,
"grad_norm": 0.08610779702269872,
"learning_rate": 0.00015022157173210968,
"loss": 0.3293,
"step": 363
},
{
"epoch": 1.1993410214168039,
"grad_norm": 0.08526653923014106,
"learning_rate": 0.0001498890911261622,
"loss": 0.3245,
"step": 364
},
{
"epoch": 1.2026359143327843,
"grad_norm": 0.08300876143938384,
"learning_rate": 0.0001495558746551313,
"loss": 0.3341,
"step": 365
},
{
"epoch": 1.2059308072487644,
"grad_norm": 0.08284607312666305,
"learning_rate": 0.00014922192723396645,
"loss": 0.3219,
"step": 366
},
{
"epoch": 1.2092257001647446,
"grad_norm": 0.08002802793230862,
"learning_rate": 0.00014888725378839877,
"loss": 0.3143,
"step": 367
},
{
"epoch": 1.2125205930807248,
"grad_norm": 0.08140933851788057,
"learning_rate": 0.00014855185925486818,
"loss": 0.3243,
"step": 368
},
{
"epoch": 1.2158154859967052,
"grad_norm": 0.08664085115985616,
"learning_rate": 0.00014821574858045074,
"loss": 0.3229,
"step": 369
},
{
"epoch": 1.2191103789126854,
"grad_norm": 0.08612508394887794,
"learning_rate": 0.00014787892672278556,
"loss": 0.3259,
"step": 370
},
{
"epoch": 1.2224052718286655,
"grad_norm": 0.08770343929357799,
"learning_rate": 0.00014754139865000168,
"loss": 0.3315,
"step": 371
},
{
"epoch": 1.2257001647446457,
"grad_norm": 0.08425286738586797,
"learning_rate": 0.00014720316934064496,
"loss": 0.3348,
"step": 372
},
{
"epoch": 1.2289950576606261,
"grad_norm": 0.08560046261080007,
"learning_rate": 0.00014686424378360433,
"loss": 0.3278,
"step": 373
},
{
"epoch": 1.2322899505766063,
"grad_norm": 0.08692213775944008,
"learning_rate": 0.00014652462697803848,
"loss": 0.3289,
"step": 374
},
{
"epoch": 1.2355848434925865,
"grad_norm": 0.0850172377986241,
"learning_rate": 0.0001461843239333021,
"loss": 0.3289,
"step": 375
},
{
"epoch": 1.2388797364085666,
"grad_norm": 0.08538674165797111,
"learning_rate": 0.00014584333966887177,
"loss": 0.3077,
"step": 376
},
{
"epoch": 1.242174629324547,
"grad_norm": 0.08225612872698992,
"learning_rate": 0.0001455016792142722,
"loss": 0.3207,
"step": 377
},
{
"epoch": 1.2454695222405272,
"grad_norm": 0.0824992791408092,
"learning_rate": 0.00014515934760900184,
"loss": 0.3123,
"step": 378
},
{
"epoch": 1.2487644151565074,
"grad_norm": 0.08817004563803918,
"learning_rate": 0.0001448163499024587,
"loss": 0.331,
"step": 379
},
{
"epoch": 1.2520593080724876,
"grad_norm": 0.08988900733842042,
"learning_rate": 0.00014447269115386573,
"loss": 0.3171,
"step": 380
},
{
"epoch": 1.255354200988468,
"grad_norm": 0.08242288142785008,
"learning_rate": 0.00014412837643219625,
"loss": 0.3195,
"step": 381
},
{
"epoch": 1.2586490939044481,
"grad_norm": 0.08150045524497893,
"learning_rate": 0.00014378341081609927,
"loss": 0.3287,
"step": 382
},
{
"epoch": 1.2619439868204283,
"grad_norm": 0.08343807243424793,
"learning_rate": 0.00014343779939382452,
"loss": 0.3111,
"step": 383
},
{
"epoch": 1.2652388797364087,
"grad_norm": 0.08374911663870195,
"learning_rate": 0.0001430915472631472,
"loss": 0.3278,
"step": 384
},
{
"epoch": 1.2685337726523889,
"grad_norm": 0.08428186467144357,
"learning_rate": 0.00014274465953129325,
"loss": 0.3186,
"step": 385
},
{
"epoch": 1.271828665568369,
"grad_norm": 0.08176159742747383,
"learning_rate": 0.00014239714131486348,
"loss": 0.3196,
"step": 386
},
{
"epoch": 1.2751235584843492,
"grad_norm": 0.08350565101214433,
"learning_rate": 0.00014204899773975855,
"loss": 0.3235,
"step": 387
},
{
"epoch": 1.2784184514003294,
"grad_norm": 0.08179249241721015,
"learning_rate": 0.00014170023394110306,
"loss": 0.3137,
"step": 388
},
{
"epoch": 1.2817133443163098,
"grad_norm": 0.0837263180330502,
"learning_rate": 0.00014135085506316997,
"loss": 0.3229,
"step": 389
},
{
"epoch": 1.28500823723229,
"grad_norm": 0.08607305244666162,
"learning_rate": 0.00014100086625930464,
"loss": 0.3231,
"step": 390
},
{
"epoch": 1.2883031301482701,
"grad_norm": 0.08575599533724002,
"learning_rate": 0.00014065027269184887,
"loss": 0.3231,
"step": 391
},
{
"epoch": 1.2915980230642505,
"grad_norm": 0.08064829044874067,
"learning_rate": 0.00014029907953206475,
"loss": 0.3199,
"step": 392
},
{
"epoch": 1.2948929159802307,
"grad_norm": 0.08408021103636296,
"learning_rate": 0.0001399472919600584,
"loss": 0.3273,
"step": 393
},
{
"epoch": 1.2981878088962109,
"grad_norm": 0.08114989710998861,
"learning_rate": 0.00013959491516470334,
"loss": 0.3162,
"step": 394
},
{
"epoch": 1.301482701812191,
"grad_norm": 0.0834997194534925,
"learning_rate": 0.00013924195434356442,
"loss": 0.3185,
"step": 395
},
{
"epoch": 1.3047775947281712,
"grad_norm": 0.08216535139972479,
"learning_rate": 0.0001388884147028207,
"loss": 0.3318,
"step": 396
},
{
"epoch": 1.3080724876441516,
"grad_norm": 0.08151769664701901,
"learning_rate": 0.0001385343014571889,
"loss": 0.3242,
"step": 397
},
{
"epoch": 1.3113673805601318,
"grad_norm": 0.0807742083114109,
"learning_rate": 0.00013817961982984643,
"loss": 0.3205,
"step": 398
},
{
"epoch": 1.314662273476112,
"grad_norm": 0.08184040624493487,
"learning_rate": 0.0001378243750523543,
"loss": 0.3149,
"step": 399
},
{
"epoch": 1.3179571663920924,
"grad_norm": 0.08102459745060192,
"learning_rate": 0.00013746857236458007,
"loss": 0.319,
"step": 400
},
{
"epoch": 1.3212520593080725,
"grad_norm": 0.08059247007108714,
"learning_rate": 0.00013711221701462036,
"loss": 0.3127,
"step": 401
},
{
"epoch": 1.3245469522240527,
"grad_norm": 0.0793438468854946,
"learning_rate": 0.0001367553142587237,
"loss": 0.3251,
"step": 402
},
{
"epoch": 1.327841845140033,
"grad_norm": 0.08216864917484416,
"learning_rate": 0.00013639786936121286,
"loss": 0.3197,
"step": 403
},
{
"epoch": 1.331136738056013,
"grad_norm": 0.08321227130258257,
"learning_rate": 0.0001360398875944071,
"loss": 0.321,
"step": 404
},
{
"epoch": 1.3344316309719935,
"grad_norm": 0.08148653549162627,
"learning_rate": 0.00013568137423854458,
"loss": 0.3238,
"step": 405
},
{
"epoch": 1.3377265238879736,
"grad_norm": 0.08624309126721959,
"learning_rate": 0.00013532233458170444,
"loss": 0.3239,
"step": 406
},
{
"epoch": 1.3410214168039538,
"grad_norm": 0.0797835803516585,
"learning_rate": 0.00013496277391972873,
"loss": 0.3269,
"step": 407
},
{
"epoch": 1.3443163097199342,
"grad_norm": 0.08094589206278008,
"learning_rate": 0.00013460269755614435,
"loss": 0.3041,
"step": 408
},
{
"epoch": 1.3476112026359144,
"grad_norm": 0.0852983943023146,
"learning_rate": 0.00013424211080208479,
"loss": 0.3118,
"step": 409
},
{
"epoch": 1.3509060955518946,
"grad_norm": 0.08749111120896899,
"learning_rate": 0.00013388101897621182,
"loss": 0.3271,
"step": 410
},
{
"epoch": 1.3542009884678747,
"grad_norm": 0.07952649573183727,
"learning_rate": 0.00013351942740463707,
"loss": 0.3081,
"step": 411
},
{
"epoch": 1.357495881383855,
"grad_norm": 0.08578795160366025,
"learning_rate": 0.00013315734142084335,
"loss": 0.32,
"step": 412
},
{
"epoch": 1.3607907742998353,
"grad_norm": 0.08928715903816203,
"learning_rate": 0.00013279476636560608,
"loss": 0.3227,
"step": 413
},
{
"epoch": 1.3640856672158155,
"grad_norm": 0.08229016926007318,
"learning_rate": 0.0001324317075869146,
"loss": 0.3275,
"step": 414
},
{
"epoch": 1.3673805601317957,
"grad_norm": 0.0887890717135067,
"learning_rate": 0.00013206817043989302,
"loss": 0.3323,
"step": 415
},
{
"epoch": 1.370675453047776,
"grad_norm": 0.08699973758850778,
"learning_rate": 0.0001317041602867215,
"loss": 0.325,
"step": 416
},
{
"epoch": 1.3739703459637562,
"grad_norm": 0.08467845536010866,
"learning_rate": 0.000131339682496557,
"loss": 0.3078,
"step": 417
},
{
"epoch": 1.3772652388797364,
"grad_norm": 0.08776785203585662,
"learning_rate": 0.0001309747424454542,
"loss": 0.3418,
"step": 418
},
{
"epoch": 1.3805601317957166,
"grad_norm": 0.08658565822289628,
"learning_rate": 0.00013060934551628603,
"loss": 0.3328,
"step": 419
},
{
"epoch": 1.3838550247116967,
"grad_norm": 0.08764833043201878,
"learning_rate": 0.00013024349709866447,
"loss": 0.309,
"step": 420
},
{
"epoch": 1.3871499176276771,
"grad_norm": 0.08756434828387893,
"learning_rate": 0.00012987720258886095,
"loss": 0.3306,
"step": 421
},
{
"epoch": 1.3904448105436573,
"grad_norm": 0.08469701138029223,
"learning_rate": 0.00012951046738972672,
"loss": 0.323,
"step": 422
},
{
"epoch": 1.3937397034596375,
"grad_norm": 0.08504023280141097,
"learning_rate": 0.00012914329691061328,
"loss": 0.3211,
"step": 423
},
{
"epoch": 1.3970345963756179,
"grad_norm": 0.08487408883622825,
"learning_rate": 0.00012877569656729243,
"loss": 0.3313,
"step": 424
},
{
"epoch": 1.400329489291598,
"grad_norm": 0.07939354397337003,
"learning_rate": 0.00012840767178187655,
"loss": 0.3102,
"step": 425
},
{
"epoch": 1.4036243822075782,
"grad_norm": 0.0829139296391848,
"learning_rate": 0.00012803922798273852,
"loss": 0.316,
"step": 426
},
{
"epoch": 1.4069192751235584,
"grad_norm": 0.0823703090760657,
"learning_rate": 0.00012767037060443172,
"loss": 0.3251,
"step": 427
},
{
"epoch": 1.4102141680395386,
"grad_norm": 0.08066102303361472,
"learning_rate": 0.00012730110508760975,
"loss": 0.3238,
"step": 428
},
{
"epoch": 1.413509060955519,
"grad_norm": 0.0844768933412219,
"learning_rate": 0.0001269314368789463,
"loss": 0.3242,
"step": 429
},
{
"epoch": 1.4168039538714992,
"grad_norm": 0.0815698910146405,
"learning_rate": 0.00012656137143105483,
"loss": 0.3242,
"step": 430
},
{
"epoch": 1.4200988467874793,
"grad_norm": 0.08017268325963865,
"learning_rate": 0.00012619091420240793,
"loss": 0.3034,
"step": 431
},
{
"epoch": 1.4233937397034597,
"grad_norm": 0.08083122415122998,
"learning_rate": 0.0001258200706572572,
"loss": 0.3169,
"step": 432
},
{
"epoch": 1.42668863261944,
"grad_norm": 0.08337187987916603,
"learning_rate": 0.00012544884626555225,
"loss": 0.3193,
"step": 433
},
{
"epoch": 1.42998352553542,
"grad_norm": 0.08129518922024724,
"learning_rate": 0.00012507724650286014,
"loss": 0.3071,
"step": 434
},
{
"epoch": 1.4332784184514002,
"grad_norm": 0.08367220188501179,
"learning_rate": 0.00012470527685028482,
"loss": 0.3238,
"step": 435
},
{
"epoch": 1.4365733113673804,
"grad_norm": 0.08412225464889816,
"learning_rate": 0.00012433294279438602,
"loss": 0.3157,
"step": 436
},
{
"epoch": 1.4398682042833608,
"grad_norm": 0.0831281998958449,
"learning_rate": 0.00012396024982709843,
"loss": 0.3141,
"step": 437
},
{
"epoch": 1.443163097199341,
"grad_norm": 0.08365116413450047,
"learning_rate": 0.00012358720344565077,
"loss": 0.3159,
"step": 438
},
{
"epoch": 1.4464579901153214,
"grad_norm": 0.08191433963107164,
"learning_rate": 0.00012321380915248446,
"loss": 0.3098,
"step": 439
},
{
"epoch": 1.4497528830313016,
"grad_norm": 0.08348553478357966,
"learning_rate": 0.0001228400724551728,
"loss": 0.3129,
"step": 440
},
{
"epoch": 1.4530477759472817,
"grad_norm": 0.0881498231342535,
"learning_rate": 0.0001224659988663395,
"loss": 0.3372,
"step": 441
},
{
"epoch": 1.456342668863262,
"grad_norm": 0.08112710228760632,
"learning_rate": 0.0001220915939035774,
"loss": 0.3169,
"step": 442
},
{
"epoch": 1.459637561779242,
"grad_norm": 0.0836876463479396,
"learning_rate": 0.00012171686308936709,
"loss": 0.3192,
"step": 443
},
{
"epoch": 1.4629324546952225,
"grad_norm": 0.08499306017819758,
"learning_rate": 0.00012134181195099555,
"loss": 0.3309,
"step": 444
},
{
"epoch": 1.4662273476112027,
"grad_norm": 0.07947051276130836,
"learning_rate": 0.00012096644602047447,
"loss": 0.3137,
"step": 445
},
{
"epoch": 1.4695222405271828,
"grad_norm": 0.08141505738594229,
"learning_rate": 0.0001205907708344588,
"loss": 0.3183,
"step": 446
},
{
"epoch": 1.4728171334431632,
"grad_norm": 0.08267307338524962,
"learning_rate": 0.000120214791934165,
"loss": 0.3195,
"step": 447
},
{
"epoch": 1.4761120263591434,
"grad_norm": 0.08366455798813263,
"learning_rate": 0.00011983851486528925,
"loss": 0.3252,
"step": 448
},
{
"epoch": 1.4794069192751236,
"grad_norm": 0.08075885404123719,
"learning_rate": 0.00011946194517792584,
"loss": 0.3255,
"step": 449
},
{
"epoch": 1.4827018121911038,
"grad_norm": 0.0812184444903526,
"learning_rate": 0.00011908508842648506,
"loss": 0.3196,
"step": 450
},
{
"epoch": 1.485996705107084,
"grad_norm": 0.08188793010828326,
"learning_rate": 0.00011870795016961156,
"loss": 0.3191,
"step": 451
},
{
"epoch": 1.4892915980230643,
"grad_norm": 0.08074626674132604,
"learning_rate": 0.00011833053597010201,
"loss": 0.3082,
"step": 452
},
{
"epoch": 1.4925864909390445,
"grad_norm": 0.0799989877308315,
"learning_rate": 0.00011795285139482342,
"loss": 0.3185,
"step": 453
},
{
"epoch": 1.4958813838550247,
"grad_norm": 0.07888888308680399,
"learning_rate": 0.00011757490201463065,
"loss": 0.3158,
"step": 454
},
{
"epoch": 1.499176276771005,
"grad_norm": 0.07859416029173805,
"learning_rate": 0.00011719669340428472,
"loss": 0.316,
"step": 455
},
{
"epoch": 1.5024711696869852,
"grad_norm": 0.07954941297717674,
"learning_rate": 0.00011681823114237,
"loss": 0.3058,
"step": 456
},
{
"epoch": 1.5057660626029654,
"grad_norm": 0.07977327521145369,
"learning_rate": 0.00011643952081121238,
"loss": 0.3125,
"step": 457
},
{
"epoch": 1.5090609555189456,
"grad_norm": 0.08507806643743294,
"learning_rate": 0.00011606056799679684,
"loss": 0.3286,
"step": 458
},
{
"epoch": 1.5123558484349258,
"grad_norm": 0.08398623989677817,
"learning_rate": 0.00011568137828868477,
"loss": 0.3242,
"step": 459
},
{
"epoch": 1.515650741350906,
"grad_norm": 0.07796453705937313,
"learning_rate": 0.000115301957279932,
"loss": 0.3042,
"step": 460
},
{
"epoch": 1.5189456342668863,
"grad_norm": 0.08177587861673677,
"learning_rate": 0.0001149223105670059,
"loss": 0.3088,
"step": 461
},
{
"epoch": 1.5222405271828665,
"grad_norm": 0.07924829822850359,
"learning_rate": 0.00011454244374970297,
"loss": 0.3203,
"step": 462
},
{
"epoch": 1.525535420098847,
"grad_norm": 0.07918582660420821,
"learning_rate": 0.00011416236243106637,
"loss": 0.3176,
"step": 463
},
{
"epoch": 1.528830313014827,
"grad_norm": 0.08010698180871471,
"learning_rate": 0.00011378207221730301,
"loss": 0.3124,
"step": 464
},
{
"epoch": 1.5321252059308073,
"grad_norm": 0.08128777832664769,
"learning_rate": 0.00011340157871770117,
"loss": 0.3044,
"step": 465
},
{
"epoch": 1.5354200988467874,
"grad_norm": 0.08036561662806507,
"learning_rate": 0.00011302088754454744,
"loss": 0.3129,
"step": 466
},
{
"epoch": 1.5387149917627676,
"grad_norm": 0.08224346354578807,
"learning_rate": 0.00011264000431304422,
"loss": 0.3259,
"step": 467
},
{
"epoch": 1.5420098846787478,
"grad_norm": 0.08411964168011243,
"learning_rate": 0.00011225893464122673,
"loss": 0.3242,
"step": 468
},
{
"epoch": 1.5453047775947282,
"grad_norm": 0.08217977092177307,
"learning_rate": 0.00011187768414988014,
"loss": 0.3133,
"step": 469
},
{
"epoch": 1.5485996705107083,
"grad_norm": 0.07968019018419265,
"learning_rate": 0.00011149625846245682,
"loss": 0.3129,
"step": 470
},
{
"epoch": 1.5518945634266887,
"grad_norm": 0.08282232295877028,
"learning_rate": 0.00011111466320499317,
"loss": 0.3238,
"step": 471
},
{
"epoch": 1.555189456342669,
"grad_norm": 0.07977651680710264,
"learning_rate": 0.00011073290400602681,
"loss": 0.3152,
"step": 472
},
{
"epoch": 1.558484349258649,
"grad_norm": 0.0796874941168346,
"learning_rate": 0.00011035098649651355,
"loss": 0.3189,
"step": 473
},
{
"epoch": 1.5617792421746293,
"grad_norm": 0.07896439490671953,
"learning_rate": 0.00010996891630974415,
"loss": 0.315,
"step": 474
},
{
"epoch": 1.5650741350906094,
"grad_norm": 0.08381587983527669,
"learning_rate": 0.00010958669908126151,
"loss": 0.3265,
"step": 475
},
{
"epoch": 1.5683690280065898,
"grad_norm": 0.08244330689629394,
"learning_rate": 0.00010920434044877732,
"loss": 0.3256,
"step": 476
},
{
"epoch": 1.57166392092257,
"grad_norm": 0.0826167817539002,
"learning_rate": 0.00010882184605208894,
"loss": 0.313,
"step": 477
},
{
"epoch": 1.5749588138385504,
"grad_norm": 0.08267352564043831,
"learning_rate": 0.00010843922153299637,
"loss": 0.329,
"step": 478
},
{
"epoch": 1.5782537067545306,
"grad_norm": 0.08070973779200152,
"learning_rate": 0.0001080564725352188,
"loss": 0.3196,
"step": 479
},
{
"epoch": 1.5815485996705108,
"grad_norm": 0.08132267986915792,
"learning_rate": 0.00010767360470431158,
"loss": 0.3235,
"step": 480
},
{
"epoch": 1.584843492586491,
"grad_norm": 0.07850537692603574,
"learning_rate": 0.00010729062368758278,
"loss": 0.3181,
"step": 481
},
{
"epoch": 1.588138385502471,
"grad_norm": 0.07881922957573682,
"learning_rate": 0.00010690753513401003,
"loss": 0.3205,
"step": 482
},
{
"epoch": 1.5914332784184513,
"grad_norm": 0.08487734661734532,
"learning_rate": 0.00010652434469415705,
"loss": 0.3256,
"step": 483
},
{
"epoch": 1.5947281713344317,
"grad_norm": 0.08101732034272957,
"learning_rate": 0.00010614105802009044,
"loss": 0.3275,
"step": 484
},
{
"epoch": 1.5980230642504119,
"grad_norm": 0.08118895741694347,
"learning_rate": 0.00010575768076529626,
"loss": 0.3153,
"step": 485
},
{
"epoch": 1.6013179571663922,
"grad_norm": 0.08250314923552278,
"learning_rate": 0.00010537421858459661,
"loss": 0.3227,
"step": 486
},
{
"epoch": 1.6046128500823724,
"grad_norm": 0.07731877431714403,
"learning_rate": 0.00010499067713406623,
"loss": 0.3099,
"step": 487
},
{
"epoch": 1.6079077429983526,
"grad_norm": 0.08521321410108347,
"learning_rate": 0.0001046070620709492,
"loss": 0.3207,
"step": 488
},
{
"epoch": 1.6112026359143328,
"grad_norm": 0.08226505701423492,
"learning_rate": 0.00010422337905357523,
"loss": 0.317,
"step": 489
},
{
"epoch": 1.614497528830313,
"grad_norm": 0.08182233558893907,
"learning_rate": 0.00010383963374127645,
"loss": 0.3153,
"step": 490
},
{
"epoch": 1.6177924217462931,
"grad_norm": 0.08005303314312309,
"learning_rate": 0.00010345583179430388,
"loss": 0.3255,
"step": 491
},
{
"epoch": 1.6210873146622735,
"grad_norm": 0.08168467422688232,
"learning_rate": 0.00010307197887374376,
"loss": 0.3149,
"step": 492
},
{
"epoch": 1.6243822075782537,
"grad_norm": 0.0794434626126838,
"learning_rate": 0.00010268808064143438,
"loss": 0.3135,
"step": 493
},
{
"epoch": 1.627677100494234,
"grad_norm": 0.08056080525788428,
"learning_rate": 0.00010230414275988218,
"loss": 0.3167,
"step": 494
},
{
"epoch": 1.6309719934102143,
"grad_norm": 0.08147329845434119,
"learning_rate": 0.00010192017089217862,
"loss": 0.3232,
"step": 495
},
{
"epoch": 1.6342668863261944,
"grad_norm": 0.07839262012603093,
"learning_rate": 0.00010153617070191632,
"loss": 0.3123,
"step": 496
},
{
"epoch": 1.6375617792421746,
"grad_norm": 0.07951939016035157,
"learning_rate": 0.00010115214785310568,
"loss": 0.3116,
"step": 497
},
{
"epoch": 1.6408566721581548,
"grad_norm": 0.07912600699062161,
"learning_rate": 0.00010076810801009137,
"loss": 0.3142,
"step": 498
},
{
"epoch": 1.644151565074135,
"grad_norm": 0.08317226545954248,
"learning_rate": 0.00010038405683746867,
"loss": 0.3189,
"step": 499
},
{
"epoch": 1.6474464579901154,
"grad_norm": 0.08011582289342602,
"learning_rate": 0.0001,
"loss": 0.3173,
"step": 500
},
{
"epoch": 1.6507413509060955,
"grad_norm": 0.08299947106298346,
"learning_rate": 9.961594316253134e-05,
"loss": 0.3317,
"step": 501
},
{
"epoch": 1.654036243822076,
"grad_norm": 0.08048632067214515,
"learning_rate": 9.923189198990868e-05,
"loss": 0.3155,
"step": 502
},
{
"epoch": 1.657331136738056,
"grad_norm": 0.08095937151716077,
"learning_rate": 9.884785214689435e-05,
"loss": 0.3125,
"step": 503
},
{
"epoch": 1.6606260296540363,
"grad_norm": 0.08711970279631695,
"learning_rate": 9.84638292980837e-05,
"loss": 0.3231,
"step": 504
},
{
"epoch": 1.6639209225700164,
"grad_norm": 0.08339433460792585,
"learning_rate": 9.807982910782141e-05,
"loss": 0.3176,
"step": 505
},
{
"epoch": 1.6672158154859966,
"grad_norm": 0.08207683826613399,
"learning_rate": 9.769585724011783e-05,
"loss": 0.3218,
"step": 506
},
{
"epoch": 1.6705107084019768,
"grad_norm": 0.08030351365951861,
"learning_rate": 9.731191935856565e-05,
"loss": 0.3266,
"step": 507
},
{
"epoch": 1.6738056013179572,
"grad_norm": 0.08063540809733169,
"learning_rate": 9.692802112625623e-05,
"loss": 0.3109,
"step": 508
},
{
"epoch": 1.6771004942339374,
"grad_norm": 0.08217788739453986,
"learning_rate": 9.654416820569617e-05,
"loss": 0.3212,
"step": 509
},
{
"epoch": 1.6803953871499178,
"grad_norm": 0.08104865270695036,
"learning_rate": 9.616036625872357e-05,
"loss": 0.3159,
"step": 510
},
{
"epoch": 1.683690280065898,
"grad_norm": 0.07916408331958537,
"learning_rate": 9.577662094642478e-05,
"loss": 0.3175,
"step": 511
},
{
"epoch": 1.6869851729818781,
"grad_norm": 0.0778681677011439,
"learning_rate": 9.539293792905083e-05,
"loss": 0.3004,
"step": 512
},
{
"epoch": 1.6902800658978583,
"grad_norm": 0.08479179086459149,
"learning_rate": 9.500932286593377e-05,
"loss": 0.3218,
"step": 513
},
{
"epoch": 1.6935749588138385,
"grad_norm": 0.08491747525792748,
"learning_rate": 9.462578141540341e-05,
"loss": 0.3187,
"step": 514
},
{
"epoch": 1.6968698517298186,
"grad_norm": 0.08428239314838194,
"learning_rate": 9.424231923470377e-05,
"loss": 0.3261,
"step": 515
},
{
"epoch": 1.700164744645799,
"grad_norm": 0.0776528664851824,
"learning_rate": 9.385894197990957e-05,
"loss": 0.3154,
"step": 516
},
{
"epoch": 1.7034596375617792,
"grad_norm": 0.08160169468766518,
"learning_rate": 9.347565530584298e-05,
"loss": 0.3208,
"step": 517
},
{
"epoch": 1.7067545304777596,
"grad_norm": 0.08219835239653911,
"learning_rate": 9.309246486598999e-05,
"loss": 0.3185,
"step": 518
},
{
"epoch": 1.7100494233937398,
"grad_norm": 0.07844970543869025,
"learning_rate": 9.270937631241723e-05,
"loss": 0.3095,
"step": 519
},
{
"epoch": 1.71334431630972,
"grad_norm": 0.0804533791298634,
"learning_rate": 9.232639529568843e-05,
"loss": 0.3062,
"step": 520
},
{
"epoch": 1.7166392092257001,
"grad_norm": 0.07915250160793909,
"learning_rate": 9.194352746478123e-05,
"loss": 0.3193,
"step": 521
},
{
"epoch": 1.7199341021416803,
"grad_norm": 0.08112939736857154,
"learning_rate": 9.156077846700367e-05,
"loss": 0.3246,
"step": 522
},
{
"epoch": 1.7232289950576605,
"grad_norm": 0.0838606481003645,
"learning_rate": 9.117815394791107e-05,
"loss": 0.3147,
"step": 523
},
{
"epoch": 1.7265238879736409,
"grad_norm": 0.08242793925392256,
"learning_rate": 9.07956595512227e-05,
"loss": 0.3038,
"step": 524
},
{
"epoch": 1.729818780889621,
"grad_norm": 0.08176093816973408,
"learning_rate": 9.041330091873853e-05,
"loss": 0.3104,
"step": 525
},
{
"epoch": 1.7331136738056014,
"grad_norm": 0.08767477644679618,
"learning_rate": 9.003108369025586e-05,
"loss": 0.3317,
"step": 526
},
{
"epoch": 1.7364085667215816,
"grad_norm": 0.07767826934356571,
"learning_rate": 8.964901350348648e-05,
"loss": 0.3109,
"step": 527
},
{
"epoch": 1.7397034596375618,
"grad_norm": 0.08145456958981293,
"learning_rate": 8.926709599397318e-05,
"loss": 0.3189,
"step": 528
},
{
"epoch": 1.742998352553542,
"grad_norm": 0.08009880884270958,
"learning_rate": 8.888533679500688e-05,
"loss": 0.3148,
"step": 529
},
{
"epoch": 1.7462932454695221,
"grad_norm": 0.07944592757175745,
"learning_rate": 8.850374153754322e-05,
"loss": 0.3205,
"step": 530
},
{
"epoch": 1.7495881383855023,
"grad_norm": 0.08077078414479961,
"learning_rate": 8.812231585011986e-05,
"loss": 0.3109,
"step": 531
},
{
"epoch": 1.7528830313014827,
"grad_norm": 0.07868302874406433,
"learning_rate": 8.774106535877331e-05,
"loss": 0.3115,
"step": 532
},
{
"epoch": 1.7561779242174629,
"grad_norm": 0.07987610438576559,
"learning_rate": 8.735999568695579e-05,
"loss": 0.3129,
"step": 533
},
{
"epoch": 1.7594728171334433,
"grad_norm": 0.08073251563008862,
"learning_rate": 8.697911245545257e-05,
"loss": 0.3113,
"step": 534
},
{
"epoch": 1.7627677100494235,
"grad_norm": 0.07944843744740043,
"learning_rate": 8.659842128229887e-05,
"loss": 0.3155,
"step": 535
},
{
"epoch": 1.7660626029654036,
"grad_norm": 0.07780637939142554,
"learning_rate": 8.6217927782697e-05,
"loss": 0.3,
"step": 536
},
{
"epoch": 1.7693574958813838,
"grad_norm": 0.08113477442420164,
"learning_rate": 8.583763756893365e-05,
"loss": 0.3167,
"step": 537
},
{
"epoch": 1.772652388797364,
"grad_norm": 0.08057722076446969,
"learning_rate": 8.545755625029704e-05,
"loss": 0.3175,
"step": 538
},
{
"epoch": 1.7759472817133442,
"grad_norm": 0.08313615782876255,
"learning_rate": 8.507768943299415e-05,
"loss": 0.3151,
"step": 539
},
{
"epoch": 1.7792421746293245,
"grad_norm": 0.08066682962003371,
"learning_rate": 8.469804272006801e-05,
"loss": 0.3113,
"step": 540
},
{
"epoch": 1.782537067545305,
"grad_norm": 0.0813733641664339,
"learning_rate": 8.431862171131523e-05,
"loss": 0.3025,
"step": 541
},
{
"epoch": 1.7858319604612851,
"grad_norm": 0.08070284448139889,
"learning_rate": 8.393943200320323e-05,
"loss": 0.3196,
"step": 542
},
{
"epoch": 1.7891268533772653,
"grad_norm": 0.08287982363174917,
"learning_rate": 8.356047918878763e-05,
"loss": 0.3148,
"step": 543
},
{
"epoch": 1.7924217462932455,
"grad_norm": 0.08019250653063249,
"learning_rate": 8.318176885763002e-05,
"loss": 0.3028,
"step": 544
},
{
"epoch": 1.7957166392092256,
"grad_norm": 0.08071265971974027,
"learning_rate": 8.280330659571531e-05,
"loss": 0.3039,
"step": 545
},
{
"epoch": 1.7990115321252058,
"grad_norm": 0.07903551548611944,
"learning_rate": 8.242509798536935e-05,
"loss": 0.3209,
"step": 546
},
{
"epoch": 1.8023064250411862,
"grad_norm": 0.07993774098400507,
"learning_rate": 8.204714860517662e-05,
"loss": 0.315,
"step": 547
},
{
"epoch": 1.8056013179571664,
"grad_norm": 0.08071601924849857,
"learning_rate": 8.1669464029898e-05,
"loss": 0.3103,
"step": 548
},
{
"epoch": 1.8088962108731468,
"grad_norm": 0.08034136007232408,
"learning_rate": 8.129204983038847e-05,
"loss": 0.3055,
"step": 549
},
{
"epoch": 1.812191103789127,
"grad_norm": 0.080811797791999,
"learning_rate": 8.091491157351495e-05,
"loss": 0.3106,
"step": 550
},
{
"epoch": 1.8154859967051071,
"grad_norm": 0.08057302103747395,
"learning_rate": 8.053805482207419e-05,
"loss": 0.3167,
"step": 551
},
{
"epoch": 1.8187808896210873,
"grad_norm": 0.07972513982263217,
"learning_rate": 8.016148513471077e-05,
"loss": 0.3195,
"step": 552
},
{
"epoch": 1.8220757825370675,
"grad_norm": 0.08065942919433094,
"learning_rate": 7.978520806583502e-05,
"loss": 0.3137,
"step": 553
},
{
"epoch": 1.8253706754530477,
"grad_norm": 0.07866104765728227,
"learning_rate": 7.940922916554122e-05,
"loss": 0.3003,
"step": 554
},
{
"epoch": 1.828665568369028,
"grad_norm": 0.08248026306411695,
"learning_rate": 7.903355397952557e-05,
"loss": 0.3091,
"step": 555
},
{
"epoch": 1.8319604612850082,
"grad_norm": 0.07940956558724678,
"learning_rate": 7.865818804900449e-05,
"loss": 0.3083,
"step": 556
},
{
"epoch": 1.8352553542009886,
"grad_norm": 0.0793106056162054,
"learning_rate": 7.828313691063293e-05,
"loss": 0.3148,
"step": 557
},
{
"epoch": 1.8385502471169688,
"grad_norm": 0.08268047744068477,
"learning_rate": 7.79084060964226e-05,
"loss": 0.2999,
"step": 558
},
{
"epoch": 1.841845140032949,
"grad_norm": 0.08118175241280721,
"learning_rate": 7.753400113366051e-05,
"loss": 0.3169,
"step": 559
},
{
"epoch": 1.8451400329489291,
"grad_norm": 0.0828667048207493,
"learning_rate": 7.71599275448272e-05,
"loss": 0.3186,
"step": 560
},
{
"epoch": 1.8484349258649093,
"grad_norm": 0.08283621296978765,
"learning_rate": 7.678619084751553e-05,
"loss": 0.3143,
"step": 561
},
{
"epoch": 1.8517298187808895,
"grad_norm": 0.08274280076295433,
"learning_rate": 7.641279655434928e-05,
"loss": 0.3267,
"step": 562
},
{
"epoch": 1.8550247116968699,
"grad_norm": 0.08039004247048885,
"learning_rate": 7.603975017290158e-05,
"loss": 0.3172,
"step": 563
},
{
"epoch": 1.85831960461285,
"grad_norm": 0.07867131124374156,
"learning_rate": 7.566705720561399e-05,
"loss": 0.3126,
"step": 564
},
{
"epoch": 1.8616144975288305,
"grad_norm": 0.08253187445048786,
"learning_rate": 7.529472314971521e-05,
"loss": 0.3115,
"step": 565
},
{
"epoch": 1.8649093904448106,
"grad_norm": 0.08202843679224964,
"learning_rate": 7.492275349713988e-05,
"loss": 0.3114,
"step": 566
},
{
"epoch": 1.8682042833607908,
"grad_norm": 0.08477138135829519,
"learning_rate": 7.455115373444779e-05,
"loss": 0.3194,
"step": 567
},
{
"epoch": 1.871499176276771,
"grad_norm": 0.08194783958889466,
"learning_rate": 7.417992934274279e-05,
"loss": 0.316,
"step": 568
},
{
"epoch": 1.8747940691927512,
"grad_norm": 0.07844240178385448,
"learning_rate": 7.380908579759206e-05,
"loss": 0.3132,
"step": 569
},
{
"epoch": 1.8780889621087313,
"grad_norm": 0.08034073534981348,
"learning_rate": 7.343862856894521e-05,
"loss": 0.3109,
"step": 570
},
{
"epoch": 1.8813838550247117,
"grad_norm": 0.07683009249076064,
"learning_rate": 7.30685631210537e-05,
"loss": 0.3118,
"step": 571
},
{
"epoch": 1.884678747940692,
"grad_norm": 0.078737692987615,
"learning_rate": 7.26988949123903e-05,
"loss": 0.3037,
"step": 572
},
{
"epoch": 1.8879736408566723,
"grad_norm": 0.08036884301385262,
"learning_rate": 7.232962939556832e-05,
"loss": 0.3077,
"step": 573
},
{
"epoch": 1.8912685337726525,
"grad_norm": 0.08225292912320385,
"learning_rate": 7.196077201726148e-05,
"loss": 0.3153,
"step": 574
},
{
"epoch": 1.8945634266886326,
"grad_norm": 0.0814304024397034,
"learning_rate": 7.159232821812348e-05,
"loss": 0.3222,
"step": 575
},
{
"epoch": 1.8978583196046128,
"grad_norm": 0.08201543804410394,
"learning_rate": 7.122430343270761e-05,
"loss": 0.3085,
"step": 576
},
{
"epoch": 1.901153212520593,
"grad_norm": 0.08252119673034763,
"learning_rate": 7.085670308938675e-05,
"loss": 0.3177,
"step": 577
},
{
"epoch": 1.9044481054365732,
"grad_norm": 0.08193246340388376,
"learning_rate": 7.048953261027328e-05,
"loss": 0.3146,
"step": 578
},
{
"epoch": 1.9077429983525536,
"grad_norm": 0.08467744748067746,
"learning_rate": 7.012279741113909e-05,
"loss": 0.3154,
"step": 579
},
{
"epoch": 1.9110378912685337,
"grad_norm": 0.0796890981600387,
"learning_rate": 6.975650290133554e-05,
"loss": 0.3089,
"step": 580
},
{
"epoch": 1.9143327841845141,
"grad_norm": 0.08018458516133334,
"learning_rate": 6.939065448371398e-05,
"loss": 0.3145,
"step": 581
},
{
"epoch": 1.9176276771004943,
"grad_norm": 0.08351184814776264,
"learning_rate": 6.902525755454582e-05,
"loss": 0.3072,
"step": 582
},
{
"epoch": 1.9209225700164745,
"grad_norm": 0.08470137530950465,
"learning_rate": 6.866031750344302e-05,
"loss": 0.3199,
"step": 583
},
{
"epoch": 1.9242174629324547,
"grad_norm": 0.08085473387051476,
"learning_rate": 6.829583971327851e-05,
"loss": 0.3135,
"step": 584
},
{
"epoch": 1.9275123558484348,
"grad_norm": 0.07885114940373524,
"learning_rate": 6.793182956010699e-05,
"loss": 0.2951,
"step": 585
},
{
"epoch": 1.930807248764415,
"grad_norm": 0.08341968429527848,
"learning_rate": 6.756829241308542e-05,
"loss": 0.316,
"step": 586
},
{
"epoch": 1.9341021416803954,
"grad_norm": 0.08157140843535905,
"learning_rate": 6.720523363439393e-05,
"loss": 0.3109,
"step": 587
},
{
"epoch": 1.9373970345963756,
"grad_norm": 0.08023033371726958,
"learning_rate": 6.684265857915669e-05,
"loss": 0.3035,
"step": 588
},
{
"epoch": 1.940691927512356,
"grad_norm": 0.08530632792173666,
"learning_rate": 6.648057259536297e-05,
"loss": 0.3179,
"step": 589
},
{
"epoch": 1.9439868204283361,
"grad_norm": 0.0816362830414679,
"learning_rate": 6.611898102378818e-05,
"loss": 0.3116,
"step": 590
},
{
"epoch": 1.9472817133443163,
"grad_norm": 0.0808945434707495,
"learning_rate": 6.575788919791521e-05,
"loss": 0.3233,
"step": 591
},
{
"epoch": 1.9505766062602965,
"grad_norm": 0.08188718070776142,
"learning_rate": 6.539730244385568e-05,
"loss": 0.319,
"step": 592
},
{
"epoch": 1.9538714991762767,
"grad_norm": 0.07841386595300816,
"learning_rate": 6.503722608027128e-05,
"loss": 0.306,
"step": 593
},
{
"epoch": 1.9571663920922568,
"grad_norm": 0.08232575487936705,
"learning_rate": 6.467766541829557e-05,
"loss": 0.306,
"step": 594
},
{
"epoch": 1.9604612850082372,
"grad_norm": 0.08054867187757639,
"learning_rate": 6.431862576145546e-05,
"loss": 0.3066,
"step": 595
},
{
"epoch": 1.9637561779242174,
"grad_norm": 0.07860962030970246,
"learning_rate": 6.396011240559294e-05,
"loss": 0.3201,
"step": 596
},
{
"epoch": 1.9670510708401978,
"grad_norm": 0.07986110172464526,
"learning_rate": 6.360213063878715e-05,
"loss": 0.3055,
"step": 597
},
{
"epoch": 1.970345963756178,
"grad_norm": 0.0789796364693798,
"learning_rate": 6.324468574127627e-05,
"loss": 0.3097,
"step": 598
},
{
"epoch": 1.9736408566721582,
"grad_norm": 0.0787082961725244,
"learning_rate": 6.288778298537967e-05,
"loss": 0.3109,
"step": 599
},
{
"epoch": 1.9769357495881383,
"grad_norm": 0.08085871461604628,
"learning_rate": 6.253142763541996e-05,
"loss": 0.3082,
"step": 600
},
{
"epoch": 1.9802306425041185,
"grad_norm": 0.08173480993968053,
"learning_rate": 6.21756249476457e-05,
"loss": 0.3027,
"step": 601
},
{
"epoch": 1.9835255354200987,
"grad_norm": 0.07951252806564535,
"learning_rate": 6.182038017015359e-05,
"loss": 0.3064,
"step": 602
},
{
"epoch": 1.986820428336079,
"grad_norm": 0.07908799027316071,
"learning_rate": 6.14656985428111e-05,
"loss": 0.2988,
"step": 603
},
{
"epoch": 1.9901153212520593,
"grad_norm": 0.07976971510017963,
"learning_rate": 6.111158529717931e-05,
"loss": 0.3076,
"step": 604
},
{
"epoch": 1.9934102141680397,
"grad_norm": 0.07973855612867402,
"learning_rate": 6.075804565643561e-05,
"loss": 0.3052,
"step": 605
},
{
"epoch": 1.9967051070840198,
"grad_norm": 0.08113426466491773,
"learning_rate": 6.0405084835296674e-05,
"loss": 0.3114,
"step": 606
},
{
"epoch": 2.0,
"grad_norm": 0.07946560263483063,
"learning_rate": 6.005270803994165e-05,
"loss": 0.2954,
"step": 607
},
{
"epoch": 2.0,
"eval_loss": 0.3216867446899414,
"eval_runtime": 160.0399,
"eval_samples_per_second": 31.898,
"eval_steps_per_second": 1.0,
"step": 607
},
{
"epoch": 2.00329489291598,
"grad_norm": 0.07877266800911653,
"learning_rate": 5.970092046793523e-05,
"loss": 0.2924,
"step": 608
},
{
"epoch": 2.0065897858319603,
"grad_norm": 0.07812553246498843,
"learning_rate": 5.934972730815115e-05,
"loss": 0.291,
"step": 609
},
{
"epoch": 2.0098846787479405,
"grad_norm": 0.08088679451304487,
"learning_rate": 5.899913374069539e-05,
"loss": 0.2928,
"step": 610
},
{
"epoch": 2.013179571663921,
"grad_norm": 0.07976290454539975,
"learning_rate": 5.864914493683005e-05,
"loss": 0.2894,
"step": 611
},
{
"epoch": 2.0164744645799013,
"grad_norm": 0.07969314347284605,
"learning_rate": 5.829976605889695e-05,
"loss": 0.2982,
"step": 612
},
{
"epoch": 2.0197693574958815,
"grad_norm": 0.0815615988419636,
"learning_rate": 5.795100226024145e-05,
"loss": 0.2971,
"step": 613
},
{
"epoch": 2.0230642504118617,
"grad_norm": 0.08736468979222958,
"learning_rate": 5.760285868513649e-05,
"loss": 0.3038,
"step": 614
},
{
"epoch": 2.026359143327842,
"grad_norm": 0.08409772537245033,
"learning_rate": 5.7255340468706776e-05,
"loss": 0.2859,
"step": 615
},
{
"epoch": 2.029654036243822,
"grad_norm": 0.08020824554821433,
"learning_rate": 5.690845273685279e-05,
"loss": 0.2842,
"step": 616
},
{
"epoch": 2.032948929159802,
"grad_norm": 0.08818956404130317,
"learning_rate": 5.6562200606175495e-05,
"loss": 0.3075,
"step": 617
},
{
"epoch": 2.0362438220757824,
"grad_norm": 0.08599576872590033,
"learning_rate": 5.6216589183900695e-05,
"loss": 0.2966,
"step": 618
},
{
"epoch": 2.039538714991763,
"grad_norm": 0.08560071569717868,
"learning_rate": 5.587162356780376e-05,
"loss": 0.291,
"step": 619
},
{
"epoch": 2.042833607907743,
"grad_norm": 0.0862088898346008,
"learning_rate": 5.552730884613429e-05,
"loss": 0.2912,
"step": 620
},
{
"epoch": 2.0461285008237233,
"grad_norm": 0.08680473893419544,
"learning_rate": 5.5183650097541293e-05,
"loss": 0.3036,
"step": 621
},
{
"epoch": 2.0494233937397035,
"grad_norm": 0.08699770517707416,
"learning_rate": 5.484065239099817e-05,
"loss": 0.296,
"step": 622
},
{
"epoch": 2.0527182866556837,
"grad_norm": 0.08379594467728943,
"learning_rate": 5.449832078572781e-05,
"loss": 0.2921,
"step": 623
},
{
"epoch": 2.056013179571664,
"grad_norm": 0.08411666762788954,
"learning_rate": 5.4156660331128225e-05,
"loss": 0.2911,
"step": 624
},
{
"epoch": 2.059308072487644,
"grad_norm": 0.08606290445123865,
"learning_rate": 5.381567606669794e-05,
"loss": 0.2891,
"step": 625
},
{
"epoch": 2.062602965403624,
"grad_norm": 0.0878110595169254,
"learning_rate": 5.347537302196153e-05,
"loss": 0.3094,
"step": 626
},
{
"epoch": 2.065897858319605,
"grad_norm": 0.08473022854436194,
"learning_rate": 5.313575621639568e-05,
"loss": 0.2935,
"step": 627
},
{
"epoch": 2.069192751235585,
"grad_norm": 0.08277404639202951,
"learning_rate": 5.279683065935505e-05,
"loss": 0.3011,
"step": 628
},
{
"epoch": 2.072487644151565,
"grad_norm": 0.08576477710245138,
"learning_rate": 5.245860134999831e-05,
"loss": 0.298,
"step": 629
},
{
"epoch": 2.0757825370675453,
"grad_norm": 0.0861916805489822,
"learning_rate": 5.212107327721445e-05,
"loss": 0.2914,
"step": 630
},
{
"epoch": 2.0790774299835255,
"grad_norm": 0.08488906269091921,
"learning_rate": 5.178425141954925e-05,
"loss": 0.294,
"step": 631
},
{
"epoch": 2.0823723228995057,
"grad_norm": 0.08098586035137433,
"learning_rate": 5.1448140745131844e-05,
"loss": 0.2855,
"step": 632
},
{
"epoch": 2.085667215815486,
"grad_norm": 0.08838145724831743,
"learning_rate": 5.111274621160127e-05,
"loss": 0.2952,
"step": 633
},
{
"epoch": 2.088962108731466,
"grad_norm": 0.08648279828081434,
"learning_rate": 5.077807276603357e-05,
"loss": 0.2928,
"step": 634
},
{
"epoch": 2.0922570016474467,
"grad_norm": 0.08314911866387735,
"learning_rate": 5.044412534486873e-05,
"loss": 0.2945,
"step": 635
},
{
"epoch": 2.095551894563427,
"grad_norm": 0.08461362273542454,
"learning_rate": 5.01109088738378e-05,
"loss": 0.299,
"step": 636
},
{
"epoch": 2.098846787479407,
"grad_norm": 0.08377057965335477,
"learning_rate": 4.9778428267890345e-05,
"loss": 0.2891,
"step": 637
},
{
"epoch": 2.102141680395387,
"grad_norm": 0.08543795983675807,
"learning_rate": 4.9446688431121944e-05,
"loss": 0.2912,
"step": 638
},
{
"epoch": 2.1054365733113674,
"grad_norm": 0.0819773341791353,
"learning_rate": 4.911569425670168e-05,
"loss": 0.2945,
"step": 639
},
{
"epoch": 2.1087314662273475,
"grad_norm": 0.08749676356833817,
"learning_rate": 4.878545062680025e-05,
"loss": 0.3111,
"step": 640
},
{
"epoch": 2.1120263591433277,
"grad_norm": 0.08931336745649711,
"learning_rate": 4.845596241251773e-05,
"loss": 0.2967,
"step": 641
},
{
"epoch": 2.115321252059308,
"grad_norm": 0.08738403486711675,
"learning_rate": 4.8127234473811855e-05,
"loss": 0.2983,
"step": 642
},
{
"epoch": 2.1186161449752885,
"grad_norm": 0.08631801633541691,
"learning_rate": 4.7799271659426156e-05,
"loss": 0.2979,
"step": 643
},
{
"epoch": 2.1219110378912687,
"grad_norm": 0.08204476625680207,
"learning_rate": 4.74720788068187e-05,
"loss": 0.2911,
"step": 644
},
{
"epoch": 2.125205930807249,
"grad_norm": 0.08540431532875102,
"learning_rate": 4.714566074209058e-05,
"loss": 0.2934,
"step": 645
},
{
"epoch": 2.128500823723229,
"grad_norm": 0.0867243714824401,
"learning_rate": 4.682002227991466e-05,
"loss": 0.295,
"step": 646
},
{
"epoch": 2.131795716639209,
"grad_norm": 0.08772574559749194,
"learning_rate": 4.6495168223464734e-05,
"loss": 0.2992,
"step": 647
},
{
"epoch": 2.1350906095551894,
"grad_norm": 0.0862846034965142,
"learning_rate": 4.617110336434464e-05,
"loss": 0.2934,
"step": 648
},
{
"epoch": 2.1383855024711695,
"grad_norm": 0.08279698024636946,
"learning_rate": 4.5847832482517386e-05,
"loss": 0.2905,
"step": 649
},
{
"epoch": 2.1416803953871497,
"grad_norm": 0.08449602542472846,
"learning_rate": 4.5525360346234916e-05,
"loss": 0.2762,
"step": 650
},
{
"epoch": 2.1449752883031303,
"grad_norm": 0.08512352335672224,
"learning_rate": 4.520369171196765e-05,
"loss": 0.2819,
"step": 651
},
{
"epoch": 2.1482701812191105,
"grad_norm": 0.08942106754915288,
"learning_rate": 4.488283132433437e-05,
"loss": 0.2809,
"step": 652
},
{
"epoch": 2.1515650741350907,
"grad_norm": 0.08538361680827215,
"learning_rate": 4.456278391603207e-05,
"loss": 0.2875,
"step": 653
},
{
"epoch": 2.154859967051071,
"grad_norm": 0.08716447313431239,
"learning_rate": 4.42435542077664e-05,
"loss": 0.3049,
"step": 654
},
{
"epoch": 2.158154859967051,
"grad_norm": 0.08722819981927521,
"learning_rate": 4.392514690818193e-05,
"loss": 0.2947,
"step": 655
},
{
"epoch": 2.161449752883031,
"grad_norm": 0.08703119195800675,
"learning_rate": 4.360756671379258e-05,
"loss": 0.2899,
"step": 656
},
{
"epoch": 2.1647446457990114,
"grad_norm": 0.08554895733632949,
"learning_rate": 4.329081830891253e-05,
"loss": 0.3008,
"step": 657
},
{
"epoch": 2.168039538714992,
"grad_norm": 0.0855723358947586,
"learning_rate": 4.2974906365587095e-05,
"loss": 0.2948,
"step": 658
},
{
"epoch": 2.171334431630972,
"grad_norm": 0.08669144280813641,
"learning_rate": 4.265983554352361e-05,
"loss": 0.2918,
"step": 659
},
{
"epoch": 2.1746293245469523,
"grad_norm": 0.08587506440809513,
"learning_rate": 4.2345610490023004e-05,
"loss": 0.3007,
"step": 660
},
{
"epoch": 2.1779242174629325,
"grad_norm": 0.08325186764285471,
"learning_rate": 4.203223583991103e-05,
"loss": 0.2862,
"step": 661
},
{
"epoch": 2.1812191103789127,
"grad_norm": 0.0848179385036651,
"learning_rate": 4.171971621547004e-05,
"loss": 0.2941,
"step": 662
},
{
"epoch": 2.184514003294893,
"grad_norm": 0.08744598220624812,
"learning_rate": 4.140805622637062e-05,
"loss": 0.2884,
"step": 663
},
{
"epoch": 2.187808896210873,
"grad_norm": 0.08730152511128712,
"learning_rate": 4.1097260469603815e-05,
"loss": 0.2939,
"step": 664
},
{
"epoch": 2.191103789126853,
"grad_norm": 0.08479459661361502,
"learning_rate": 4.0787333529413216e-05,
"loss": 0.2907,
"step": 665
},
{
"epoch": 2.1943986820428334,
"grad_norm": 0.08748979890541152,
"learning_rate": 4.0478279977227364e-05,
"loss": 0.3054,
"step": 666
},
{
"epoch": 2.197693574958814,
"grad_norm": 0.08688506963459419,
"learning_rate": 4.0170104371592196e-05,
"loss": 0.294,
"step": 667
},
{
"epoch": 2.200988467874794,
"grad_norm": 0.08813970885177967,
"learning_rate": 3.986281125810408e-05,
"loss": 0.2906,
"step": 668
},
{
"epoch": 2.2042833607907744,
"grad_norm": 0.0862215239726716,
"learning_rate": 3.9556405169342506e-05,
"loss": 0.2919,
"step": 669
},
{
"epoch": 2.2075782537067545,
"grad_norm": 0.0864141576056617,
"learning_rate": 3.925089062480339e-05,
"loss": 0.2988,
"step": 670
},
{
"epoch": 2.2108731466227347,
"grad_norm": 0.08741141927551971,
"learning_rate": 3.8946272130832276e-05,
"loss": 0.2973,
"step": 671
},
{
"epoch": 2.214168039538715,
"grad_norm": 0.087264203309356,
"learning_rate": 3.864255418055801e-05,
"loss": 0.2984,
"step": 672
},
{
"epoch": 2.217462932454695,
"grad_norm": 0.0863024056441452,
"learning_rate": 3.8339741253826386e-05,
"loss": 0.2894,
"step": 673
},
{
"epoch": 2.2207578253706757,
"grad_norm": 0.08686962836143866,
"learning_rate": 3.803783781713411e-05,
"loss": 0.2943,
"step": 674
},
{
"epoch": 2.224052718286656,
"grad_norm": 0.088506180884522,
"learning_rate": 3.7736848323562804e-05,
"loss": 0.2936,
"step": 675
},
{
"epoch": 2.227347611202636,
"grad_norm": 0.08429663780585792,
"learning_rate": 3.7436777212713484e-05,
"loss": 0.2883,
"step": 676
},
{
"epoch": 2.230642504118616,
"grad_norm": 0.0861610009496725,
"learning_rate": 3.7137628910641e-05,
"loss": 0.3014,
"step": 677
},
{
"epoch": 2.2339373970345964,
"grad_norm": 0.08744439598418424,
"learning_rate": 3.683940782978875e-05,
"loss": 0.3027,
"step": 678
},
{
"epoch": 2.2372322899505765,
"grad_norm": 0.08478804953508354,
"learning_rate": 3.654211836892356e-05,
"loss": 0.2885,
"step": 679
},
{
"epoch": 2.2405271828665567,
"grad_norm": 0.08396188944325574,
"learning_rate": 3.624576491307088e-05,
"loss": 0.2898,
"step": 680
},
{
"epoch": 2.243822075782537,
"grad_norm": 0.08464943476670307,
"learning_rate": 3.595035183345007e-05,
"loss": 0.2932,
"step": 681
},
{
"epoch": 2.247116968698517,
"grad_norm": 0.08361972692496587,
"learning_rate": 3.565588348740995e-05,
"loss": 0.2846,
"step": 682
},
{
"epoch": 2.2504118616144977,
"grad_norm": 0.08473337066131743,
"learning_rate": 3.536236421836438e-05,
"loss": 0.2873,
"step": 683
},
{
"epoch": 2.253706754530478,
"grad_norm": 0.08560947782135529,
"learning_rate": 3.5069798355728456e-05,
"loss": 0.2952,
"step": 684
},
{
"epoch": 2.257001647446458,
"grad_norm": 0.08273696228936674,
"learning_rate": 3.477819021485448e-05,
"loss": 0.2843,
"step": 685
},
{
"epoch": 2.260296540362438,
"grad_norm": 0.0856458280242604,
"learning_rate": 3.448754409696823e-05,
"loss": 0.2862,
"step": 686
},
{
"epoch": 2.2635914332784184,
"grad_norm": 0.0849473074749724,
"learning_rate": 3.4197864289105763e-05,
"loss": 0.2844,
"step": 687
},
{
"epoch": 2.2668863261943986,
"grad_norm": 0.08752747087542179,
"learning_rate": 3.390915506405e-05,
"loss": 0.3009,
"step": 688
},
{
"epoch": 2.2701812191103787,
"grad_norm": 0.08515710595917833,
"learning_rate": 3.36214206802677e-05,
"loss": 0.2917,
"step": 689
},
{
"epoch": 2.2734761120263594,
"grad_norm": 0.08735259929721675,
"learning_rate": 3.3334665381846744e-05,
"loss": 0.3025,
"step": 690
},
{
"epoch": 2.2767710049423395,
"grad_norm": 0.08615519636042336,
"learning_rate": 3.3048893398433465e-05,
"loss": 0.293,
"step": 691
},
{
"epoch": 2.2800658978583197,
"grad_norm": 0.08824900221463129,
"learning_rate": 3.2764108945170336e-05,
"loss": 0.3008,
"step": 692
},
{
"epoch": 2.2833607907743,
"grad_norm": 0.08484959409699497,
"learning_rate": 3.248031622263361e-05,
"loss": 0.2873,
"step": 693
},
{
"epoch": 2.28665568369028,
"grad_norm": 0.08724514821548604,
"learning_rate": 3.2197519416771615e-05,
"loss": 0.2973,
"step": 694
},
{
"epoch": 2.2899505766062602,
"grad_norm": 0.08393767608817229,
"learning_rate": 3.1915722698842877e-05,
"loss": 0.2841,
"step": 695
},
{
"epoch": 2.2932454695222404,
"grad_norm": 0.08641309723191103,
"learning_rate": 3.163493022535451e-05,
"loss": 0.2872,
"step": 696
},
{
"epoch": 2.2965403624382206,
"grad_norm": 0.08788590177688262,
"learning_rate": 3.135514613800108e-05,
"loss": 0.2995,
"step": 697
},
{
"epoch": 2.2998352553542007,
"grad_norm": 0.08531593399084322,
"learning_rate": 3.107637456360348e-05,
"loss": 0.2894,
"step": 698
},
{
"epoch": 2.3031301482701814,
"grad_norm": 0.0872718919185223,
"learning_rate": 3.079861961404789e-05,
"loss": 0.2887,
"step": 699
},
{
"epoch": 2.3064250411861615,
"grad_norm": 0.08612255232765391,
"learning_rate": 3.052188538622535e-05,
"loss": 0.2901,
"step": 700
},
{
"epoch": 2.3097199341021417,
"grad_norm": 0.08922298918575919,
"learning_rate": 3.0246175961971212e-05,
"loss": 0.2937,
"step": 701
},
{
"epoch": 2.313014827018122,
"grad_norm": 0.0882010224849904,
"learning_rate": 2.997149540800498e-05,
"loss": 0.2931,
"step": 702
},
{
"epoch": 2.316309719934102,
"grad_norm": 0.08622719885654752,
"learning_rate": 2.9697847775870224e-05,
"loss": 0.2898,
"step": 703
},
{
"epoch": 2.3196046128500822,
"grad_norm": 0.08359805006615195,
"learning_rate": 2.942523710187496e-05,
"loss": 0.2847,
"step": 704
},
{
"epoch": 2.3228995057660624,
"grad_norm": 0.08449003074110172,
"learning_rate": 2.915366740703207e-05,
"loss": 0.28,
"step": 705
},
{
"epoch": 2.326194398682043,
"grad_norm": 0.08855451697096253,
"learning_rate": 2.8883142696999878e-05,
"loss": 0.2987,
"step": 706
},
{
"epoch": 2.329489291598023,
"grad_norm": 0.08622350256458039,
"learning_rate": 2.8613666962023254e-05,
"loss": 0.2968,
"step": 707
},
{
"epoch": 2.3327841845140034,
"grad_norm": 0.08652168396802937,
"learning_rate": 2.8345244176874663e-05,
"loss": 0.2823,
"step": 708
},
{
"epoch": 2.3360790774299836,
"grad_norm": 0.08823592083642218,
"learning_rate": 2.8077878300795448e-05,
"loss": 0.2912,
"step": 709
},
{
"epoch": 2.3393739703459637,
"grad_norm": 0.08507829603456869,
"learning_rate": 2.7811573277437608e-05,
"loss": 0.2835,
"step": 710
},
{
"epoch": 2.342668863261944,
"grad_norm": 0.08549841674015039,
"learning_rate": 2.7546333034805527e-05,
"loss": 0.292,
"step": 711
},
{
"epoch": 2.345963756177924,
"grad_norm": 0.08443979295008668,
"learning_rate": 2.7282161485198078e-05,
"loss": 0.2974,
"step": 712
},
{
"epoch": 2.3492586490939047,
"grad_norm": 0.08503200569748393,
"learning_rate": 2.7019062525150784e-05,
"loss": 0.3016,
"step": 713
},
{
"epoch": 2.352553542009885,
"grad_norm": 0.08504863770154245,
"learning_rate": 2.6757040035378568e-05,
"loss": 0.2984,
"step": 714
},
{
"epoch": 2.355848434925865,
"grad_norm": 0.08644443469983008,
"learning_rate": 2.6496097880718364e-05,
"loss": 0.2954,
"step": 715
},
{
"epoch": 2.359143327841845,
"grad_norm": 0.08915999578886315,
"learning_rate": 2.6236239910072068e-05,
"loss": 0.2988,
"step": 716
},
{
"epoch": 2.3624382207578254,
"grad_norm": 0.08416603357610633,
"learning_rate": 2.5977469956349954e-05,
"loss": 0.2879,
"step": 717
},
{
"epoch": 2.3657331136738056,
"grad_norm": 0.08817807859466884,
"learning_rate": 2.5719791836413976e-05,
"loss": 0.299,
"step": 718
},
{
"epoch": 2.3690280065897857,
"grad_norm": 0.08541077975584914,
"learning_rate": 2.5463209351021456e-05,
"loss": 0.2901,
"step": 719
},
{
"epoch": 2.372322899505766,
"grad_norm": 0.08470671550098466,
"learning_rate": 2.5207726284769194e-05,
"loss": 0.303,
"step": 720
},
{
"epoch": 2.375617792421746,
"grad_norm": 0.08819661663546222,
"learning_rate": 2.495334640603746e-05,
"loss": 0.2888,
"step": 721
},
{
"epoch": 2.3789126853377267,
"grad_norm": 0.08591537862829479,
"learning_rate": 2.470007346693455e-05,
"loss": 0.2948,
"step": 722
},
{
"epoch": 2.382207578253707,
"grad_norm": 0.0875830979203168,
"learning_rate": 2.444791120324127e-05,
"loss": 0.2915,
"step": 723
},
{
"epoch": 2.385502471169687,
"grad_norm": 0.08613104051905594,
"learning_rate": 2.419686333435606e-05,
"loss": 0.2972,
"step": 724
},
{
"epoch": 2.3887973640856672,
"grad_norm": 0.08782242765122425,
"learning_rate": 2.3946933563239972e-05,
"loss": 0.299,
"step": 725
},
{
"epoch": 2.3920922570016474,
"grad_norm": 0.08680612007460217,
"learning_rate": 2.369812557636204e-05,
"loss": 0.2943,
"step": 726
},
{
"epoch": 2.3953871499176276,
"grad_norm": 0.08603749003168497,
"learning_rate": 2.3450443043645032e-05,
"loss": 0.3012,
"step": 727
},
{
"epoch": 2.3986820428336078,
"grad_norm": 0.08657552256692291,
"learning_rate": 2.3203889618411233e-05,
"loss": 0.2906,
"step": 728
},
{
"epoch": 2.4019769357495884,
"grad_norm": 0.08704450296844311,
"learning_rate": 2.2958468937328526e-05,
"loss": 0.2951,
"step": 729
},
{
"epoch": 2.4052718286655685,
"grad_norm": 0.0855118301451358,
"learning_rate": 2.2714184620356827e-05,
"loss": 0.2931,
"step": 730
},
{
"epoch": 2.4085667215815487,
"grad_norm": 0.08724807161271898,
"learning_rate": 2.2471040270694666e-05,
"loss": 0.2917,
"step": 731
},
{
"epoch": 2.411861614497529,
"grad_norm": 0.08600975835485788,
"learning_rate": 2.2229039474726053e-05,
"loss": 0.296,
"step": 732
},
{
"epoch": 2.415156507413509,
"grad_norm": 0.08864784516812786,
"learning_rate": 2.1988185801967464e-05,
"loss": 0.2972,
"step": 733
},
{
"epoch": 2.4184514003294892,
"grad_norm": 0.08762596803411996,
"learning_rate": 2.174848280501538e-05,
"loss": 0.2906,
"step": 734
},
{
"epoch": 2.4217462932454694,
"grad_norm": 0.09035462710118229,
"learning_rate": 2.150993401949376e-05,
"loss": 0.2951,
"step": 735
},
{
"epoch": 2.4250411861614496,
"grad_norm": 0.08755876151137919,
"learning_rate": 2.127254296400195e-05,
"loss": 0.2901,
"step": 736
},
{
"epoch": 2.4283360790774298,
"grad_norm": 0.08766926155438874,
"learning_rate": 2.103631314006267e-05,
"loss": 0.2934,
"step": 737
},
{
"epoch": 2.4316309719934104,
"grad_norm": 0.08606023381132255,
"learning_rate": 2.0801248032070542e-05,
"loss": 0.2846,
"step": 738
},
{
"epoch": 2.4349258649093906,
"grad_norm": 0.08809592154677096,
"learning_rate": 2.0567351107240563e-05,
"loss": 0.2916,
"step": 739
},
{
"epoch": 2.4382207578253707,
"grad_norm": 0.0904592519515893,
"learning_rate": 2.033462581555703e-05,
"loss": 0.2819,
"step": 740
},
{
"epoch": 2.441515650741351,
"grad_norm": 0.08617703763043115,
"learning_rate": 2.0103075589722575e-05,
"loss": 0.2908,
"step": 741
},
{
"epoch": 2.444810543657331,
"grad_norm": 0.08964817977842639,
"learning_rate": 1.9872703845107643e-05,
"loss": 0.305,
"step": 742
},
{
"epoch": 2.4481054365733113,
"grad_norm": 0.09015699027576811,
"learning_rate": 1.9643513979700033e-05,
"loss": 0.2954,
"step": 743
},
{
"epoch": 2.4514003294892914,
"grad_norm": 0.08997440049824991,
"learning_rate": 1.941550937405483e-05,
"loss": 0.2926,
"step": 744
},
{
"epoch": 2.454695222405272,
"grad_norm": 0.09114828822884588,
"learning_rate": 1.918869339124444e-05,
"loss": 0.296,
"step": 745
},
{
"epoch": 2.4579901153212522,
"grad_norm": 0.09027991136572545,
"learning_rate": 1.896306937680913e-05,
"loss": 0.2977,
"step": 746
},
{
"epoch": 2.4612850082372324,
"grad_norm": 0.08660452640872197,
"learning_rate": 1.8738640658707585e-05,
"loss": 0.2885,
"step": 747
},
{
"epoch": 2.4645799011532126,
"grad_norm": 0.08979882751073061,
"learning_rate": 1.8515410547267875e-05,
"loss": 0.2906,
"step": 748
},
{
"epoch": 2.4678747940691927,
"grad_norm": 0.08618796776224673,
"learning_rate": 1.829338233513853e-05,
"loss": 0.2972,
"step": 749
},
{
"epoch": 2.471169686985173,
"grad_norm": 0.08735262583561819,
"learning_rate": 1.80725592972401e-05,
"loss": 0.2851,
"step": 750
},
{
"epoch": 2.474464579901153,
"grad_norm": 0.08587818073514078,
"learning_rate": 1.7852944690716768e-05,
"loss": 0.298,
"step": 751
},
{
"epoch": 2.4777594728171333,
"grad_norm": 0.09093449501445203,
"learning_rate": 1.7634541754888367e-05,
"loss": 0.2996,
"step": 752
},
{
"epoch": 2.4810543657331134,
"grad_norm": 0.08576101421203153,
"learning_rate": 1.7417353711202478e-05,
"loss": 0.2892,
"step": 753
},
{
"epoch": 2.484349258649094,
"grad_norm": 0.08796655318401572,
"learning_rate": 1.7201383763187085e-05,
"loss": 0.3064,
"step": 754
},
{
"epoch": 2.4876441515650742,
"grad_norm": 0.08917308260607959,
"learning_rate": 1.6986635096403215e-05,
"loss": 0.296,
"step": 755
},
{
"epoch": 2.4909390444810544,
"grad_norm": 0.08712369748650076,
"learning_rate": 1.6773110878397935e-05,
"loss": 0.2892,
"step": 756
},
{
"epoch": 2.4942339373970346,
"grad_norm": 0.08830732733373077,
"learning_rate": 1.6560814258657686e-05,
"loss": 0.2868,
"step": 757
},
{
"epoch": 2.4975288303130148,
"grad_norm": 0.08886428573765547,
"learning_rate": 1.634974836856188e-05,
"loss": 0.2975,
"step": 758
},
{
"epoch": 2.500823723228995,
"grad_norm": 0.09001998396318142,
"learning_rate": 1.6139916321336512e-05,
"loss": 0.3011,
"step": 759
},
{
"epoch": 2.504118616144975,
"grad_norm": 0.0866643459541065,
"learning_rate": 1.5931321212008466e-05,
"loss": 0.2791,
"step": 760
},
{
"epoch": 2.5074135090609557,
"grad_norm": 0.08969541659303033,
"learning_rate": 1.5723966117359746e-05,
"loss": 0.2845,
"step": 761
},
{
"epoch": 2.510708401976936,
"grad_norm": 0.08678120829747425,
"learning_rate": 1.5517854095882124e-05,
"loss": 0.2897,
"step": 762
},
{
"epoch": 2.514003294892916,
"grad_norm": 0.08768949528062786,
"learning_rate": 1.531298818773197e-05,
"loss": 0.2926,
"step": 763
},
{
"epoch": 2.5172981878088962,
"grad_norm": 0.08613630689405796,
"learning_rate": 1.5109371414685513e-05,
"loss": 0.2994,
"step": 764
},
{
"epoch": 2.5205930807248764,
"grad_norm": 0.08818859976681247,
"learning_rate": 1.4907006780094213e-05,
"loss": 0.2915,
"step": 765
},
{
"epoch": 2.5238879736408566,
"grad_norm": 0.08916911710423102,
"learning_rate": 1.4705897268840396e-05,
"loss": 0.3087,
"step": 766
},
{
"epoch": 2.5271828665568368,
"grad_norm": 0.08860834462061187,
"learning_rate": 1.4506045847293359e-05,
"loss": 0.2936,
"step": 767
},
{
"epoch": 2.5304777594728174,
"grad_norm": 0.08563535943112995,
"learning_rate": 1.4307455463265573e-05,
"loss": 0.2855,
"step": 768
},
{
"epoch": 2.533772652388797,
"grad_norm": 0.08826978729379603,
"learning_rate": 1.411012904596909e-05,
"loss": 0.3115,
"step": 769
},
{
"epoch": 2.5370675453047777,
"grad_norm": 0.0870890210971865,
"learning_rate": 1.3914069505972483e-05,
"loss": 0.2905,
"step": 770
},
{
"epoch": 2.540362438220758,
"grad_norm": 0.08354870518763845,
"learning_rate": 1.3719279735157874e-05,
"loss": 0.2916,
"step": 771
},
{
"epoch": 2.543657331136738,
"grad_norm": 0.08910519459239277,
"learning_rate": 1.3525762606678271e-05,
"loss": 0.3048,
"step": 772
},
{
"epoch": 2.5469522240527183,
"grad_norm": 0.08365455172188432,
"learning_rate": 1.3333520974915093e-05,
"loss": 0.292,
"step": 773
},
{
"epoch": 2.5502471169686984,
"grad_norm": 0.08911881943765725,
"learning_rate": 1.3142557675436262e-05,
"loss": 0.2983,
"step": 774
},
{
"epoch": 2.5535420098846786,
"grad_norm": 0.08661195377868255,
"learning_rate": 1.2952875524954233e-05,
"loss": 0.2916,
"step": 775
},
{
"epoch": 2.556836902800659,
"grad_norm": 0.08738778807939013,
"learning_rate": 1.2764477321284474e-05,
"loss": 0.2827,
"step": 776
},
{
"epoch": 2.5601317957166394,
"grad_norm": 0.08675909170932133,
"learning_rate": 1.2577365843304212e-05,
"loss": 0.2922,
"step": 777
},
{
"epoch": 2.5634266886326196,
"grad_norm": 0.08653045636242618,
"learning_rate": 1.2391543850911514e-05,
"loss": 0.289,
"step": 778
},
{
"epoch": 2.5667215815485998,
"grad_norm": 0.08794743727959366,
"learning_rate": 1.2207014084984381e-05,
"loss": 0.3006,
"step": 779
},
{
"epoch": 2.57001647446458,
"grad_norm": 0.08765042878548815,
"learning_rate": 1.2023779267340562e-05,
"loss": 0.3053,
"step": 780
},
{
"epoch": 2.57331136738056,
"grad_norm": 0.08506311043178476,
"learning_rate": 1.1841842100697254e-05,
"loss": 0.287,
"step": 781
},
{
"epoch": 2.5766062602965403,
"grad_norm": 0.08559193608270571,
"learning_rate": 1.1661205268631247e-05,
"loss": 0.2913,
"step": 782
},
{
"epoch": 2.5799011532125204,
"grad_norm": 0.08817756260222355,
"learning_rate": 1.1481871435539415e-05,
"loss": 0.2912,
"step": 783
},
{
"epoch": 2.583196046128501,
"grad_norm": 0.0871445033944061,
"learning_rate": 1.1303843246599344e-05,
"loss": 0.2807,
"step": 784
},
{
"epoch": 2.586490939044481,
"grad_norm": 0.08535578251400154,
"learning_rate": 1.1127123327730381e-05,
"loss": 0.2818,
"step": 785
},
{
"epoch": 2.5897858319604614,
"grad_norm": 0.08761486848867922,
"learning_rate": 1.0951714285554782e-05,
"loss": 0.3059,
"step": 786
},
{
"epoch": 2.5930807248764416,
"grad_norm": 0.08719801254842507,
"learning_rate": 1.0777618707359428e-05,
"loss": 0.2825,
"step": 787
},
{
"epoch": 2.5963756177924218,
"grad_norm": 0.09034119263808828,
"learning_rate": 1.0604839161057567e-05,
"loss": 0.2946,
"step": 788
},
{
"epoch": 2.599670510708402,
"grad_norm": 0.08954661242149643,
"learning_rate": 1.043337819515089e-05,
"loss": 0.2852,
"step": 789
},
{
"epoch": 2.602965403624382,
"grad_norm": 0.08724138640911316,
"learning_rate": 1.0263238338692061e-05,
"loss": 0.2907,
"step": 790
},
{
"epoch": 2.6062602965403623,
"grad_norm": 0.086330949719512,
"learning_rate": 1.0094422101247369e-05,
"loss": 0.2868,
"step": 791
},
{
"epoch": 2.6095551894563425,
"grad_norm": 0.08529314875981138,
"learning_rate": 9.926931972859622e-06,
"loss": 0.2862,
"step": 792
},
{
"epoch": 2.612850082372323,
"grad_norm": 0.08538099892093676,
"learning_rate": 9.760770424011555e-06,
"loss": 0.2842,
"step": 793
},
{
"epoch": 2.6161449752883033,
"grad_norm": 0.08346633575820249,
"learning_rate": 9.595939905589301e-06,
"loss": 0.2872,
"step": 794
},
{
"epoch": 2.6194398682042834,
"grad_norm": 0.09087513588899795,
"learning_rate": 9.43244284884629e-06,
"loss": 0.2991,
"step": 795
},
{
"epoch": 2.6227347611202636,
"grad_norm": 0.08970315413157041,
"learning_rate": 9.270281665367331e-06,
"loss": 0.2864,
"step": 796
},
{
"epoch": 2.6260296540362438,
"grad_norm": 0.08831508853553123,
"learning_rate": 9.109458747033106e-06,
"loss": 0.299,
"step": 797
},
{
"epoch": 2.629324546952224,
"grad_norm": 0.08962119945659898,
"learning_rate": 8.949976465984878e-06,
"loss": 0.2933,
"step": 798
},
{
"epoch": 2.632619439868204,
"grad_norm": 0.0903147303314654,
"learning_rate": 8.791837174589402e-06,
"loss": 0.291,
"step": 799
},
{
"epoch": 2.6359143327841847,
"grad_norm": 0.0874384283159893,
"learning_rate": 8.63504320540438e-06,
"loss": 0.2947,
"step": 800
},
{
"epoch": 2.6392092257001645,
"grad_norm": 0.09114762691001292,
"learning_rate": 8.47959687114398e-06,
"loss": 0.3005,
"step": 801
},
{
"epoch": 2.642504118616145,
"grad_norm": 0.08629238625513358,
"learning_rate": 8.325500464644731e-06,
"loss": 0.2893,
"step": 802
},
{
"epoch": 2.6457990115321253,
"grad_norm": 0.08689109034275155,
"learning_rate": 8.172756258831638e-06,
"loss": 0.2861,
"step": 803
},
{
"epoch": 2.6490939044481054,
"grad_norm": 0.08687013125668969,
"learning_rate": 8.021366506684802e-06,
"loss": 0.3018,
"step": 804
},
{
"epoch": 2.6523887973640856,
"grad_norm": 0.08733316173692361,
"learning_rate": 7.871333441206053e-06,
"loss": 0.2918,
"step": 805
},
{
"epoch": 2.655683690280066,
"grad_norm": 0.08666514271966166,
"learning_rate": 7.722659275386101e-06,
"loss": 0.2893,
"step": 806
},
{
"epoch": 2.658978583196046,
"grad_norm": 0.08572152703241656,
"learning_rate": 7.575346202171818e-06,
"loss": 0.2845,
"step": 807
},
{
"epoch": 2.662273476112026,
"grad_norm": 0.08616248420611884,
"learning_rate": 7.429396394433952e-06,
"loss": 0.2908,
"step": 808
},
{
"epoch": 2.6655683690280068,
"grad_norm": 0.08544220799292981,
"learning_rate": 7.284812004935082e-06,
"loss": 0.2828,
"step": 809
},
{
"epoch": 2.668863261943987,
"grad_norm": 0.08530855466026539,
"learning_rate": 7.141595166297832e-06,
"loss": 0.2921,
"step": 810
},
{
"epoch": 2.672158154859967,
"grad_norm": 0.08676630765622653,
"learning_rate": 6.999747990973382e-06,
"loss": 0.2824,
"step": 811
},
{
"epoch": 2.6754530477759473,
"grad_norm": 0.0858286642847653,
"learning_rate": 6.859272571210385e-06,
"loss": 0.2931,
"step": 812
},
{
"epoch": 2.6787479406919275,
"grad_norm": 0.08895340909580104,
"learning_rate": 6.720170979024065e-06,
"loss": 0.2974,
"step": 813
},
{
"epoch": 2.6820428336079076,
"grad_norm": 0.08704509442550527,
"learning_rate": 6.5824452661656936e-06,
"loss": 0.2946,
"step": 814
},
{
"epoch": 2.685337726523888,
"grad_norm": 0.08719224060765357,
"learning_rate": 6.446097464092249e-06,
"loss": 0.2825,
"step": 815
},
{
"epoch": 2.6886326194398684,
"grad_norm": 0.08505418031859806,
"learning_rate": 6.311129583936504e-06,
"loss": 0.2857,
"step": 816
},
{
"epoch": 2.6919275123558486,
"grad_norm": 0.08777962549058228,
"learning_rate": 6.177543616477377e-06,
"loss": 0.2918,
"step": 817
},
{
"epoch": 2.6952224052718288,
"grad_norm": 0.08802224386633083,
"learning_rate": 6.04534153211056e-06,
"loss": 0.2943,
"step": 818
},
{
"epoch": 2.698517298187809,
"grad_norm": 0.08672625342077533,
"learning_rate": 5.914525280819383e-06,
"loss": 0.2892,
"step": 819
},
{
"epoch": 2.701812191103789,
"grad_norm": 0.08512834708737091,
"learning_rate": 5.785096792146161e-06,
"loss": 0.2961,
"step": 820
},
{
"epoch": 2.7051070840197693,
"grad_norm": 0.08644060832701138,
"learning_rate": 5.6570579751636825e-06,
"loss": 0.2828,
"step": 821
},
{
"epoch": 2.7084019769357495,
"grad_norm": 0.08586016865471162,
"learning_rate": 5.53041071844701e-06,
"loss": 0.2847,
"step": 822
},
{
"epoch": 2.71169686985173,
"grad_norm": 0.08748419080120705,
"learning_rate": 5.405156890045704e-06,
"loss": 0.2914,
"step": 823
},
{
"epoch": 2.71499176276771,
"grad_norm": 0.08611456546743239,
"learning_rate": 5.2812983374562195e-06,
"loss": 0.2817,
"step": 824
},
{
"epoch": 2.7182866556836904,
"grad_norm": 0.08601404856441977,
"learning_rate": 5.158836887594687e-06,
"loss": 0.2838,
"step": 825
},
{
"epoch": 2.7215815485996706,
"grad_norm": 0.08519086941747171,
"learning_rate": 5.037774346769874e-06,
"loss": 0.2911,
"step": 826
},
{
"epoch": 2.724876441515651,
"grad_norm": 0.08802028933104723,
"learning_rate": 4.91811250065668e-06,
"loss": 0.2857,
"step": 827
},
{
"epoch": 2.728171334431631,
"grad_norm": 0.08817444740319591,
"learning_rate": 4.799853114269725e-06,
"loss": 0.2882,
"step": 828
},
{
"epoch": 2.731466227347611,
"grad_norm": 0.08474998779590631,
"learning_rate": 4.682997931937283e-06,
"loss": 0.2857,
"step": 829
},
{
"epoch": 2.7347611202635913,
"grad_norm": 0.08826586804083016,
"learning_rate": 4.567548677275602e-06,
"loss": 0.3008,
"step": 830
},
{
"epoch": 2.7380560131795715,
"grad_norm": 0.08589345490586381,
"learning_rate": 4.4535070531635196e-06,
"loss": 0.2792,
"step": 831
},
{
"epoch": 2.741350906095552,
"grad_norm": 0.08688465761385525,
"learning_rate": 4.340874741717194e-06,
"loss": 0.2821,
"step": 832
},
{
"epoch": 2.7446457990115323,
"grad_norm": 0.08775893601760283,
"learning_rate": 4.2296534042654985e-06,
"loss": 0.2866,
"step": 833
},
{
"epoch": 2.7479406919275124,
"grad_norm": 0.08369390812057961,
"learning_rate": 4.119844681325347e-06,
"loss": 0.2847,
"step": 834
},
{
"epoch": 2.7512355848434926,
"grad_norm": 0.08608061264894241,
"learning_rate": 4.011450192577593e-06,
"loss": 0.294,
"step": 835
},
{
"epoch": 2.754530477759473,
"grad_norm": 0.08747895302707503,
"learning_rate": 3.9044715368430796e-06,
"loss": 0.2925,
"step": 836
},
{
"epoch": 2.757825370675453,
"grad_norm": 0.09108934336270781,
"learning_rate": 3.79891029205911e-06,
"loss": 0.2986,
"step": 837
},
{
"epoch": 2.761120263591433,
"grad_norm": 0.08739799542018062,
"learning_rate": 3.6947680152561448e-06,
"loss": 0.2862,
"step": 838
},
{
"epoch": 2.7644151565074138,
"grad_norm": 0.08517335050043656,
"learning_rate": 3.592046242534819e-06,
"loss": 0.2801,
"step": 839
},
{
"epoch": 2.7677100494233935,
"grad_norm": 0.087382204616756,
"learning_rate": 3.490746489043317e-06,
"loss": 0.3014,
"step": 840
},
{
"epoch": 2.771004942339374,
"grad_norm": 0.0874720007338996,
"learning_rate": 3.3908702489550246e-06,
"loss": 0.2897,
"step": 841
},
{
"epoch": 2.7742998352553543,
"grad_norm": 0.08620311190604066,
"learning_rate": 3.292418995446445e-06,
"loss": 0.2917,
"step": 842
},
{
"epoch": 2.7775947281713345,
"grad_norm": 0.08850431386846441,
"learning_rate": 3.195394180675526e-06,
"loss": 0.2876,
"step": 843
},
{
"epoch": 2.7808896210873146,
"grad_norm": 0.08718749143095159,
"learning_rate": 3.0997972357602267e-06,
"loss": 0.2915,
"step": 844
},
{
"epoch": 2.784184514003295,
"grad_norm": 0.08432627868786195,
"learning_rate": 3.0056295707573732e-06,
"loss": 0.2807,
"step": 845
},
{
"epoch": 2.787479406919275,
"grad_norm": 0.08758768949078255,
"learning_rate": 2.912892574641879e-06,
"loss": 0.3013,
"step": 846
},
{
"epoch": 2.790774299835255,
"grad_norm": 0.08774352124282939,
"learning_rate": 2.8215876152862695e-06,
"loss": 0.2953,
"step": 847
},
{
"epoch": 2.7940691927512358,
"grad_norm": 0.08578852522202607,
"learning_rate": 2.731716039440546e-06,
"loss": 0.2849,
"step": 848
},
{
"epoch": 2.797364085667216,
"grad_norm": 0.08692795875841626,
"learning_rate": 2.6432791727121984e-06,
"loss": 0.2887,
"step": 849
},
{
"epoch": 2.800658978583196,
"grad_norm": 0.0875787452554814,
"learning_rate": 2.5562783195467676e-06,
"loss": 0.2876,
"step": 850
},
{
"epoch": 2.8039538714991763,
"grad_norm": 0.08941768041430048,
"learning_rate": 2.4707147632085812e-06,
"loss": 0.2905,
"step": 851
},
{
"epoch": 2.8072487644151565,
"grad_norm": 0.08799458088680515,
"learning_rate": 2.386589765761771e-06,
"loss": 0.3009,
"step": 852
},
{
"epoch": 2.8105436573311366,
"grad_norm": 0.08621413521392886,
"learning_rate": 2.303904568051729e-06,
"loss": 0.2853,
"step": 853
},
{
"epoch": 2.813838550247117,
"grad_norm": 0.08830565616085327,
"learning_rate": 2.222660389686759e-06,
"loss": 0.2916,
"step": 854
},
{
"epoch": 2.8171334431630974,
"grad_norm": 0.08465275130891076,
"learning_rate": 2.1428584290201117e-06,
"loss": 0.2936,
"step": 855
},
{
"epoch": 2.820428336079077,
"grad_norm": 0.08848516159290687,
"learning_rate": 2.0644998631322743e-06,
"loss": 0.2897,
"step": 856
},
{
"epoch": 2.823723228995058,
"grad_norm": 0.08667388930109197,
"learning_rate": 1.9875858478136555e-06,
"loss": 0.2864,
"step": 857
},
{
"epoch": 2.827018121911038,
"grad_norm": 0.08730797196019499,
"learning_rate": 1.912117517547518e-06,
"loss": 0.2921,
"step": 858
},
{
"epoch": 2.830313014827018,
"grad_norm": 0.08754556569098132,
"learning_rate": 1.8380959854932046e-06,
"loss": 0.2845,
"step": 859
},
{
"epoch": 2.8336079077429983,
"grad_norm": 0.0888004567935744,
"learning_rate": 1.7655223434698055e-06,
"loss": 0.2944,
"step": 860
},
{
"epoch": 2.8369028006589785,
"grad_norm": 0.08629780498350664,
"learning_rate": 1.6943976619399615e-06,
"loss": 0.2894,
"step": 861
},
{
"epoch": 2.8401976935749587,
"grad_norm": 0.08678492881753877,
"learning_rate": 1.624722989994143e-06,
"loss": 0.2922,
"step": 862
},
{
"epoch": 2.843492586490939,
"grad_norm": 0.08681999478349432,
"learning_rate": 1.5564993553351393e-06,
"loss": 0.2941,
"step": 863
},
{
"epoch": 2.8467874794069195,
"grad_norm": 0.08455558885488165,
"learning_rate": 1.489727764262927e-06,
"loss": 0.2823,
"step": 864
},
{
"epoch": 2.8500823723228996,
"grad_norm": 0.0869909676987709,
"learning_rate": 1.4244092016597932e-06,
"loss": 0.2897,
"step": 865
},
{
"epoch": 2.85337726523888,
"grad_norm": 0.08999029894977731,
"learning_rate": 1.360544630975813e-06,
"loss": 0.2925,
"step": 866
},
{
"epoch": 2.85667215815486,
"grad_norm": 0.08908986656975491,
"learning_rate": 1.2981349942146947e-06,
"loss": 0.2935,
"step": 867
},
{
"epoch": 2.85996705107084,
"grad_norm": 0.0852396248401669,
"learning_rate": 1.2371812119198133e-06,
"loss": 0.2912,
"step": 868
},
{
"epoch": 2.8632619439868203,
"grad_norm": 0.09176483266172472,
"learning_rate": 1.1776841831606545e-06,
"loss": 0.2938,
"step": 869
},
{
"epoch": 2.8665568369028005,
"grad_norm": 0.08602564538788204,
"learning_rate": 1.1196447855195802e-06,
"loss": 0.2901,
"step": 870
},
{
"epoch": 2.869851729818781,
"grad_norm": 0.08784529858760343,
"learning_rate": 1.0630638750788623e-06,
"loss": 0.2975,
"step": 871
},
{
"epoch": 2.873146622734761,
"grad_norm": 0.08971622964903737,
"learning_rate": 1.007942286408048e-06,
"loss": 0.2925,
"step": 872
},
{
"epoch": 2.8764415156507415,
"grad_norm": 0.08941711599334909,
"learning_rate": 9.542808325516571e-07,
"loss": 0.2985,
"step": 873
},
{
"epoch": 2.8797364085667216,
"grad_norm": 0.08771600586759773,
"learning_rate": 9.020803050172055e-07,
"loss": 0.2891,
"step": 874
},
{
"epoch": 2.883031301482702,
"grad_norm": 0.087619918065711,
"learning_rate": 8.513414737635006e-07,
"loss": 0.2993,
"step": 875
},
{
"epoch": 2.886326194398682,
"grad_norm": 0.08574010553502064,
"learning_rate": 8.020650871893299e-07,
"loss": 0.283,
"step": 876
},
{
"epoch": 2.889621087314662,
"grad_norm": 0.08895621856403858,
"learning_rate": 7.542518721223469e-07,
"loss": 0.2953,
"step": 877
},
{
"epoch": 2.892915980230643,
"grad_norm": 0.08752216931371197,
"learning_rate": 7.079025338084356e-07,
"loss": 0.3001,
"step": 878
},
{
"epoch": 2.8962108731466225,
"grad_norm": 0.08592028048367323,
"learning_rate": 6.630177559012518e-07,
"loss": 0.2932,
"step": 879
},
{
"epoch": 2.899505766062603,
"grad_norm": 0.08922288724108252,
"learning_rate": 6.195982004521539e-07,
"loss": 0.2919,
"step": 880
},
{
"epoch": 2.9028006589785833,
"grad_norm": 0.08607983338366068,
"learning_rate": 5.776445079004656e-07,
"loss": 0.2865,
"step": 881
},
{
"epoch": 2.9060955518945635,
"grad_norm": 0.08631649082660897,
"learning_rate": 5.371572970639727e-07,
"loss": 0.2954,
"step": 882
},
{
"epoch": 2.9093904448105437,
"grad_norm": 0.08780715085907931,
"learning_rate": 4.981371651298305e-07,
"loss": 0.2973,
"step": 883
},
{
"epoch": 2.912685337726524,
"grad_norm": 0.08653539418303985,
"learning_rate": 4.605846876457709e-07,
"loss": 0.287,
"step": 884
},
{
"epoch": 2.915980230642504,
"grad_norm": 0.08853983076560318,
"learning_rate": 4.245004185115753e-07,
"loss": 0.2926,
"step": 885
},
{
"epoch": 2.919275123558484,
"grad_norm": 0.08688701796384708,
"learning_rate": 3.8988488997092623e-07,
"loss": 0.2909,
"step": 886
},
{
"epoch": 2.922570016474465,
"grad_norm": 0.08707666200796532,
"learning_rate": 3.5673861260355767e-07,
"loss": 0.2916,
"step": 887
},
{
"epoch": 2.925864909390445,
"grad_norm": 0.0881348831962719,
"learning_rate": 3.25062075317728e-07,
"loss": 0.298,
"step": 888
},
{
"epoch": 2.929159802306425,
"grad_norm": 0.08957445515530212,
"learning_rate": 2.948557453429701e-07,
"loss": 0.2976,
"step": 889
},
{
"epoch": 2.9324546952224053,
"grad_norm": 0.08509653098922053,
"learning_rate": 2.6612006822327454e-07,
"loss": 0.2813,
"step": 890
},
{
"epoch": 2.9357495881383855,
"grad_norm": 0.08699474462722692,
"learning_rate": 2.3885546781042824e-07,
"loss": 0.2784,
"step": 891
},
{
"epoch": 2.9390444810543657,
"grad_norm": 0.08668396855759522,
"learning_rate": 2.1306234625784182e-07,
"loss": 0.2832,
"step": 892
},
{
"epoch": 2.942339373970346,
"grad_norm": 0.08908080709823125,
"learning_rate": 1.8874108401456536e-07,
"loss": 0.2943,
"step": 893
},
{
"epoch": 2.9456342668863265,
"grad_norm": 0.087253529375096,
"learning_rate": 1.658920398196928e-07,
"loss": 0.2955,
"step": 894
},
{
"epoch": 2.948929159802306,
"grad_norm": 0.08662330916170789,
"learning_rate": 1.4451555069708856e-07,
"loss": 0.2791,
"step": 895
},
{
"epoch": 2.952224052718287,
"grad_norm": 0.0874866376451616,
"learning_rate": 1.2461193195038022e-07,
"loss": 0.2854,
"step": 896
},
{
"epoch": 2.955518945634267,
"grad_norm": 0.08661370692532423,
"learning_rate": 1.0618147715835137e-07,
"loss": 0.2871,
"step": 897
},
{
"epoch": 2.958813838550247,
"grad_norm": 0.08427881713621545,
"learning_rate": 8.922445817056701e-08,
"loss": 0.2776,
"step": 898
},
{
"epoch": 2.9621087314662273,
"grad_norm": 0.08831048147016833,
"learning_rate": 7.374112510339926e-08,
"loss": 0.3043,
"step": 899
},
{
"epoch": 2.9654036243822075,
"grad_norm": 0.08457458559860552,
"learning_rate": 5.973170633631897e-08,
"loss": 0.2852,
"step": 900
},
{
"epoch": 2.9686985172981877,
"grad_norm": 0.08674405906715675,
"learning_rate": 4.719640850852081e-08,
"loss": 0.2873,
"step": 901
},
{
"epoch": 2.971993410214168,
"grad_norm": 0.08956957869825671,
"learning_rate": 3.6135416515903356e-08,
"loss": 0.3088,
"step": 902
},
{
"epoch": 2.9752883031301485,
"grad_norm": 0.08742057262425709,
"learning_rate": 2.6548893508315798e-08,
"loss": 0.2914,
"step": 903
},
{
"epoch": 2.9785831960461286,
"grad_norm": 0.08686074450100637,
"learning_rate": 1.8436980887170942e-08,
"loss": 0.2823,
"step": 904
},
{
"epoch": 2.981878088962109,
"grad_norm": 0.08958760682591677,
"learning_rate": 1.1799798303335775e-08,
"loss": 0.2935,
"step": 905
},
{
"epoch": 2.985172981878089,
"grad_norm": 0.08621279240787924,
"learning_rate": 6.637443655366227e-09,
"loss": 0.2798,
"step": 906
},
{
"epoch": 2.988467874794069,
"grad_norm": 0.08689572544052393,
"learning_rate": 2.949993088130487e-09,
"loss": 0.2945,
"step": 907
},
{
"epoch": 2.9917627677100493,
"grad_norm": 0.08696925538627061,
"learning_rate": 7.375009915655539e-10,
"loss": 0.2918,
"step": 908
},
{
"epoch": 2.9950576606260295,
"grad_norm": 0.08749368325157687,
"learning_rate": 0.0,
"loss": 0.2889,
"step": 909
},
{
"epoch": 2.9950576606260295,
"eval_loss": 0.31883931159973145,
"eval_runtime": 159.817,
"eval_samples_per_second": 31.943,
"eval_steps_per_second": 1.001,
"step": 909
},
{
"epoch": 2.9950576606260295,
"step": 909,
"total_flos": 3.2428259312245146e+17,
"train_loss": 0.34365460066774367,
"train_runtime": 10432.531,
"train_samples_per_second": 11.155,
"train_steps_per_second": 0.087
}
],
"logging_steps": 1,
"max_steps": 909,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.2428259312245146e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}