llama3-8b-math-sft-subtask-7 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 1052,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0019011406844106464,
"grad_norm": 3.5782954370521867,
"learning_rate": 1.886792452830189e-07,
"loss": 0.7096,
"step": 1
},
{
"epoch": 0.0038022813688212928,
"grad_norm": 3.3941595165154594,
"learning_rate": 3.773584905660378e-07,
"loss": 0.7167,
"step": 2
},
{
"epoch": 0.005703422053231939,
"grad_norm": 3.6124063148298062,
"learning_rate": 5.660377358490567e-07,
"loss": 0.708,
"step": 3
},
{
"epoch": 0.0076045627376425855,
"grad_norm": 3.0889500238068153,
"learning_rate": 7.547169811320755e-07,
"loss": 0.6702,
"step": 4
},
{
"epoch": 0.009505703422053232,
"grad_norm": 3.3065815632932,
"learning_rate": 9.433962264150944e-07,
"loss": 0.7281,
"step": 5
},
{
"epoch": 0.011406844106463879,
"grad_norm": 3.0898750665956487,
"learning_rate": 1.1320754716981133e-06,
"loss": 0.6911,
"step": 6
},
{
"epoch": 0.013307984790874524,
"grad_norm": 3.255757640136501,
"learning_rate": 1.3207547169811322e-06,
"loss": 0.705,
"step": 7
},
{
"epoch": 0.015209125475285171,
"grad_norm": 3.1783775894549042,
"learning_rate": 1.509433962264151e-06,
"loss": 0.7361,
"step": 8
},
{
"epoch": 0.017110266159695818,
"grad_norm": 2.601896149523353,
"learning_rate": 1.6981132075471698e-06,
"loss": 0.6699,
"step": 9
},
{
"epoch": 0.019011406844106463,
"grad_norm": 2.502563485552909,
"learning_rate": 1.8867924528301889e-06,
"loss": 0.6796,
"step": 10
},
{
"epoch": 0.02091254752851711,
"grad_norm": 1.8413928625090792,
"learning_rate": 2.075471698113208e-06,
"loss": 0.6469,
"step": 11
},
{
"epoch": 0.022813688212927757,
"grad_norm": 1.6452704835515077,
"learning_rate": 2.2641509433962266e-06,
"loss": 0.6021,
"step": 12
},
{
"epoch": 0.024714828897338403,
"grad_norm": 1.6020089603612706,
"learning_rate": 2.4528301886792453e-06,
"loss": 0.6012,
"step": 13
},
{
"epoch": 0.026615969581749048,
"grad_norm": 1.4349223917701353,
"learning_rate": 2.6415094339622644e-06,
"loss": 0.5634,
"step": 14
},
{
"epoch": 0.028517110266159697,
"grad_norm": 1.781373845542272,
"learning_rate": 2.830188679245283e-06,
"loss": 0.5838,
"step": 15
},
{
"epoch": 0.030418250950570342,
"grad_norm": 2.1825240391979737,
"learning_rate": 3.018867924528302e-06,
"loss": 0.5714,
"step": 16
},
{
"epoch": 0.03231939163498099,
"grad_norm": 2.222359649308977,
"learning_rate": 3.207547169811321e-06,
"loss": 0.5843,
"step": 17
},
{
"epoch": 0.034220532319391636,
"grad_norm": 1.7671524031551902,
"learning_rate": 3.3962264150943395e-06,
"loss": 0.5653,
"step": 18
},
{
"epoch": 0.03612167300380228,
"grad_norm": 1.552785176624156,
"learning_rate": 3.5849056603773586e-06,
"loss": 0.555,
"step": 19
},
{
"epoch": 0.03802281368821293,
"grad_norm": 1.3502721860831004,
"learning_rate": 3.7735849056603777e-06,
"loss": 0.554,
"step": 20
},
{
"epoch": 0.039923954372623575,
"grad_norm": 1.419182739019091,
"learning_rate": 3.962264150943396e-06,
"loss": 0.5356,
"step": 21
},
{
"epoch": 0.04182509505703422,
"grad_norm": 1.4394377603827857,
"learning_rate": 4.150943396226416e-06,
"loss": 0.5547,
"step": 22
},
{
"epoch": 0.043726235741444866,
"grad_norm": 1.3772350822192425,
"learning_rate": 4.339622641509435e-06,
"loss": 0.5438,
"step": 23
},
{
"epoch": 0.045627376425855515,
"grad_norm": 1.1699364526372937,
"learning_rate": 4.528301886792453e-06,
"loss": 0.5453,
"step": 24
},
{
"epoch": 0.04752851711026616,
"grad_norm": 1.0947449757331273,
"learning_rate": 4.716981132075472e-06,
"loss": 0.5342,
"step": 25
},
{
"epoch": 0.049429657794676805,
"grad_norm": 1.0827410579747634,
"learning_rate": 4.905660377358491e-06,
"loss": 0.5471,
"step": 26
},
{
"epoch": 0.051330798479087454,
"grad_norm": 1.060303635004072,
"learning_rate": 5.09433962264151e-06,
"loss": 0.5399,
"step": 27
},
{
"epoch": 0.053231939163498096,
"grad_norm": 1.046058592195638,
"learning_rate": 5.283018867924529e-06,
"loss": 0.5377,
"step": 28
},
{
"epoch": 0.055133079847908745,
"grad_norm": 0.9723124667237367,
"learning_rate": 5.4716981132075475e-06,
"loss": 0.5217,
"step": 29
},
{
"epoch": 0.057034220532319393,
"grad_norm": 0.9329068918430661,
"learning_rate": 5.660377358490566e-06,
"loss": 0.5202,
"step": 30
},
{
"epoch": 0.058935361216730035,
"grad_norm": 0.9258371312380779,
"learning_rate": 5.849056603773585e-06,
"loss": 0.5004,
"step": 31
},
{
"epoch": 0.060836501901140684,
"grad_norm": 0.9307966306026091,
"learning_rate": 6.037735849056604e-06,
"loss": 0.4787,
"step": 32
},
{
"epoch": 0.06273764258555133,
"grad_norm": 0.9434281210590566,
"learning_rate": 6.226415094339623e-06,
"loss": 0.4897,
"step": 33
},
{
"epoch": 0.06463878326996197,
"grad_norm": 0.9405427761176385,
"learning_rate": 6.415094339622642e-06,
"loss": 0.5122,
"step": 34
},
{
"epoch": 0.06653992395437262,
"grad_norm": 0.9490258876909833,
"learning_rate": 6.60377358490566e-06,
"loss": 0.5079,
"step": 35
},
{
"epoch": 0.06844106463878327,
"grad_norm": 0.9374536273575098,
"learning_rate": 6.792452830188679e-06,
"loss": 0.494,
"step": 36
},
{
"epoch": 0.07034220532319392,
"grad_norm": 0.9062817688755312,
"learning_rate": 6.981132075471699e-06,
"loss": 0.4652,
"step": 37
},
{
"epoch": 0.07224334600760456,
"grad_norm": 0.9398078871550827,
"learning_rate": 7.169811320754717e-06,
"loss": 0.51,
"step": 38
},
{
"epoch": 0.0741444866920152,
"grad_norm": 0.9745285716706361,
"learning_rate": 7.358490566037736e-06,
"loss": 0.5033,
"step": 39
},
{
"epoch": 0.07604562737642585,
"grad_norm": 0.9450823120302225,
"learning_rate": 7.5471698113207555e-06,
"loss": 0.4779,
"step": 40
},
{
"epoch": 0.0779467680608365,
"grad_norm": 0.8867061366832006,
"learning_rate": 7.735849056603775e-06,
"loss": 0.4841,
"step": 41
},
{
"epoch": 0.07984790874524715,
"grad_norm": 0.851457834608874,
"learning_rate": 7.924528301886793e-06,
"loss": 0.4688,
"step": 42
},
{
"epoch": 0.0817490494296578,
"grad_norm": 0.9031208316909863,
"learning_rate": 8.113207547169812e-06,
"loss": 0.4829,
"step": 43
},
{
"epoch": 0.08365019011406843,
"grad_norm": 0.8659105815422486,
"learning_rate": 8.301886792452832e-06,
"loss": 0.4633,
"step": 44
},
{
"epoch": 0.08555133079847908,
"grad_norm": 0.8748762299125159,
"learning_rate": 8.49056603773585e-06,
"loss": 0.4758,
"step": 45
},
{
"epoch": 0.08745247148288973,
"grad_norm": 0.9007053925087198,
"learning_rate": 8.67924528301887e-06,
"loss": 0.4819,
"step": 46
},
{
"epoch": 0.08935361216730038,
"grad_norm": 0.8316294476091217,
"learning_rate": 8.867924528301887e-06,
"loss": 0.4935,
"step": 47
},
{
"epoch": 0.09125475285171103,
"grad_norm": 0.8620226406365487,
"learning_rate": 9.056603773584907e-06,
"loss": 0.4805,
"step": 48
},
{
"epoch": 0.09315589353612168,
"grad_norm": 0.9043811716854999,
"learning_rate": 9.245283018867926e-06,
"loss": 0.4767,
"step": 49
},
{
"epoch": 0.09505703422053231,
"grad_norm": 0.8659874789291866,
"learning_rate": 9.433962264150944e-06,
"loss": 0.483,
"step": 50
},
{
"epoch": 0.09695817490494296,
"grad_norm": 0.8405294683710988,
"learning_rate": 9.622641509433963e-06,
"loss": 0.4743,
"step": 51
},
{
"epoch": 0.09885931558935361,
"grad_norm": 0.9583125661833157,
"learning_rate": 9.811320754716981e-06,
"loss": 0.4729,
"step": 52
},
{
"epoch": 0.10076045627376426,
"grad_norm": 1.06462169318722,
"learning_rate": 1e-05,
"loss": 0.4743,
"step": 53
},
{
"epoch": 0.10266159695817491,
"grad_norm": 0.8807692451307935,
"learning_rate": 1.018867924528302e-05,
"loss": 0.4831,
"step": 54
},
{
"epoch": 0.10456273764258556,
"grad_norm": 0.9292498641478995,
"learning_rate": 1.0377358490566038e-05,
"loss": 0.4827,
"step": 55
},
{
"epoch": 0.10646387832699619,
"grad_norm": 0.8871188863710504,
"learning_rate": 1.0566037735849058e-05,
"loss": 0.4723,
"step": 56
},
{
"epoch": 0.10836501901140684,
"grad_norm": 0.8356425472522154,
"learning_rate": 1.0754716981132076e-05,
"loss": 0.4456,
"step": 57
},
{
"epoch": 0.11026615969581749,
"grad_norm": 0.9411621000987156,
"learning_rate": 1.0943396226415095e-05,
"loss": 0.4829,
"step": 58
},
{
"epoch": 0.11216730038022814,
"grad_norm": 0.8150859719697868,
"learning_rate": 1.1132075471698115e-05,
"loss": 0.4869,
"step": 59
},
{
"epoch": 0.11406844106463879,
"grad_norm": 0.9748629580394024,
"learning_rate": 1.1320754716981132e-05,
"loss": 0.5039,
"step": 60
},
{
"epoch": 0.11596958174904944,
"grad_norm": 0.8746527497773197,
"learning_rate": 1.1509433962264152e-05,
"loss": 0.4521,
"step": 61
},
{
"epoch": 0.11787072243346007,
"grad_norm": 0.9099343774701552,
"learning_rate": 1.169811320754717e-05,
"loss": 0.4765,
"step": 62
},
{
"epoch": 0.11977186311787072,
"grad_norm": 0.8233570393451283,
"learning_rate": 1.188679245283019e-05,
"loss": 0.4484,
"step": 63
},
{
"epoch": 0.12167300380228137,
"grad_norm": 1.0527624687767663,
"learning_rate": 1.2075471698113209e-05,
"loss": 0.5161,
"step": 64
},
{
"epoch": 0.12357414448669202,
"grad_norm": 0.9099774665434944,
"learning_rate": 1.2264150943396227e-05,
"loss": 0.4648,
"step": 65
},
{
"epoch": 0.12547528517110265,
"grad_norm": 0.9025608602398439,
"learning_rate": 1.2452830188679246e-05,
"loss": 0.4737,
"step": 66
},
{
"epoch": 0.12737642585551331,
"grad_norm": 0.8847522797529523,
"learning_rate": 1.2641509433962264e-05,
"loss": 0.4608,
"step": 67
},
{
"epoch": 0.12927756653992395,
"grad_norm": 0.9565179505050055,
"learning_rate": 1.2830188679245283e-05,
"loss": 0.4764,
"step": 68
},
{
"epoch": 0.1311787072243346,
"grad_norm": 1.0532096396599409,
"learning_rate": 1.3018867924528303e-05,
"loss": 0.4913,
"step": 69
},
{
"epoch": 0.13307984790874525,
"grad_norm": 0.8669266753524634,
"learning_rate": 1.320754716981132e-05,
"loss": 0.4634,
"step": 70
},
{
"epoch": 0.13498098859315588,
"grad_norm": 1.1679750482872506,
"learning_rate": 1.339622641509434e-05,
"loss": 0.4678,
"step": 71
},
{
"epoch": 0.13688212927756654,
"grad_norm": 0.9968881959382052,
"learning_rate": 1.3584905660377358e-05,
"loss": 0.4785,
"step": 72
},
{
"epoch": 0.13878326996197718,
"grad_norm": 0.9631675380024608,
"learning_rate": 1.3773584905660378e-05,
"loss": 0.4737,
"step": 73
},
{
"epoch": 0.14068441064638784,
"grad_norm": 0.9847375672625539,
"learning_rate": 1.3962264150943397e-05,
"loss": 0.4372,
"step": 74
},
{
"epoch": 0.14258555133079848,
"grad_norm": 1.1453125523286225,
"learning_rate": 1.4150943396226415e-05,
"loss": 0.451,
"step": 75
},
{
"epoch": 0.1444866920152091,
"grad_norm": 1.041699330598297,
"learning_rate": 1.4339622641509435e-05,
"loss": 0.4687,
"step": 76
},
{
"epoch": 0.14638783269961977,
"grad_norm": 1.0397141705044202,
"learning_rate": 1.4528301886792452e-05,
"loss": 0.4793,
"step": 77
},
{
"epoch": 0.1482889733840304,
"grad_norm": 0.8762777090585395,
"learning_rate": 1.4716981132075472e-05,
"loss": 0.4552,
"step": 78
},
{
"epoch": 0.15019011406844107,
"grad_norm": 1.2702066662042568,
"learning_rate": 1.4905660377358491e-05,
"loss": 0.4592,
"step": 79
},
{
"epoch": 0.1520912547528517,
"grad_norm": 1.0249073147765457,
"learning_rate": 1.5094339622641511e-05,
"loss": 0.4718,
"step": 80
},
{
"epoch": 0.15399239543726237,
"grad_norm": 1.1370542431045585,
"learning_rate": 1.5283018867924532e-05,
"loss": 0.4801,
"step": 81
},
{
"epoch": 0.155893536121673,
"grad_norm": 1.1101186929807074,
"learning_rate": 1.547169811320755e-05,
"loss": 0.4527,
"step": 82
},
{
"epoch": 0.15779467680608364,
"grad_norm": 0.8839283520424742,
"learning_rate": 1.5660377358490568e-05,
"loss": 0.4762,
"step": 83
},
{
"epoch": 0.1596958174904943,
"grad_norm": 1.3233069450424841,
"learning_rate": 1.5849056603773586e-05,
"loss": 0.4802,
"step": 84
},
{
"epoch": 0.16159695817490494,
"grad_norm": 1.198054662673216,
"learning_rate": 1.6037735849056607e-05,
"loss": 0.4471,
"step": 85
},
{
"epoch": 0.1634980988593156,
"grad_norm": 0.8815981806511403,
"learning_rate": 1.6226415094339625e-05,
"loss": 0.4414,
"step": 86
},
{
"epoch": 0.16539923954372623,
"grad_norm": 1.1187333949600748,
"learning_rate": 1.6415094339622643e-05,
"loss": 0.464,
"step": 87
},
{
"epoch": 0.16730038022813687,
"grad_norm": 0.9503729431678609,
"learning_rate": 1.6603773584905664e-05,
"loss": 0.4714,
"step": 88
},
{
"epoch": 0.16920152091254753,
"grad_norm": 0.8990127440141839,
"learning_rate": 1.679245283018868e-05,
"loss": 0.4819,
"step": 89
},
{
"epoch": 0.17110266159695817,
"grad_norm": 1.3062453176712012,
"learning_rate": 1.69811320754717e-05,
"loss": 0.4704,
"step": 90
},
{
"epoch": 0.17300380228136883,
"grad_norm": 0.8672876565941433,
"learning_rate": 1.716981132075472e-05,
"loss": 0.4577,
"step": 91
},
{
"epoch": 0.17490494296577946,
"grad_norm": 1.0642937829513388,
"learning_rate": 1.735849056603774e-05,
"loss": 0.487,
"step": 92
},
{
"epoch": 0.17680608365019013,
"grad_norm": 0.820014178840764,
"learning_rate": 1.7547169811320756e-05,
"loss": 0.4663,
"step": 93
},
{
"epoch": 0.17870722433460076,
"grad_norm": 0.9564213422101271,
"learning_rate": 1.7735849056603774e-05,
"loss": 0.4763,
"step": 94
},
{
"epoch": 0.1806083650190114,
"grad_norm": 1.0110705334574648,
"learning_rate": 1.7924528301886795e-05,
"loss": 0.4759,
"step": 95
},
{
"epoch": 0.18250950570342206,
"grad_norm": 4.7742147678446845,
"learning_rate": 1.8113207547169813e-05,
"loss": 0.461,
"step": 96
},
{
"epoch": 0.1844106463878327,
"grad_norm": 8.799214667920824,
"learning_rate": 1.830188679245283e-05,
"loss": 0.4796,
"step": 97
},
{
"epoch": 0.18631178707224336,
"grad_norm": 1.6216923647009223,
"learning_rate": 1.8490566037735852e-05,
"loss": 0.4364,
"step": 98
},
{
"epoch": 0.188212927756654,
"grad_norm": 0.9578335641788249,
"learning_rate": 1.867924528301887e-05,
"loss": 0.4662,
"step": 99
},
{
"epoch": 0.19011406844106463,
"grad_norm": 1.0197995552848935,
"learning_rate": 1.8867924528301888e-05,
"loss": 0.4773,
"step": 100
},
{
"epoch": 0.1920152091254753,
"grad_norm": 0.9069768164734816,
"learning_rate": 1.905660377358491e-05,
"loss": 0.4857,
"step": 101
},
{
"epoch": 0.19391634980988592,
"grad_norm": 0.8731997341945048,
"learning_rate": 1.9245283018867927e-05,
"loss": 0.4497,
"step": 102
},
{
"epoch": 0.1958174904942966,
"grad_norm": 0.9526140185230106,
"learning_rate": 1.9433962264150945e-05,
"loss": 0.47,
"step": 103
},
{
"epoch": 0.19771863117870722,
"grad_norm": 0.9329790064960451,
"learning_rate": 1.9622641509433963e-05,
"loss": 0.4488,
"step": 104
},
{
"epoch": 0.19961977186311788,
"grad_norm": 0.8835726611780776,
"learning_rate": 1.9811320754716984e-05,
"loss": 0.4639,
"step": 105
},
{
"epoch": 0.20152091254752852,
"grad_norm": 0.9277989564029124,
"learning_rate": 2e-05,
"loss": 0.4612,
"step": 106
},
{
"epoch": 0.20342205323193915,
"grad_norm": 0.9861570860920709,
"learning_rate": 1.9999944857420527e-05,
"loss": 0.4578,
"step": 107
},
{
"epoch": 0.20532319391634982,
"grad_norm": 0.9690037012398802,
"learning_rate": 1.9999779430290247e-05,
"loss": 0.4638,
"step": 108
},
{
"epoch": 0.20722433460076045,
"grad_norm": 0.9852013573825265,
"learning_rate": 1.9999503720433575e-05,
"loss": 0.4637,
"step": 109
},
{
"epoch": 0.20912547528517111,
"grad_norm": 1.0501471476395878,
"learning_rate": 1.999911773089118e-05,
"loss": 0.4489,
"step": 110
},
{
"epoch": 0.21102661596958175,
"grad_norm": 0.8433954778932308,
"learning_rate": 1.999862146591996e-05,
"loss": 0.4406,
"step": 111
},
{
"epoch": 0.21292775665399238,
"grad_norm": 0.9223665452312211,
"learning_rate": 1.9998014930992976e-05,
"loss": 0.4421,
"step": 112
},
{
"epoch": 0.21482889733840305,
"grad_norm": 0.9287916681664911,
"learning_rate": 1.9997298132799408e-05,
"loss": 0.4625,
"step": 113
},
{
"epoch": 0.21673003802281368,
"grad_norm": 0.9207130777492082,
"learning_rate": 1.9996471079244477e-05,
"loss": 0.4823,
"step": 114
},
{
"epoch": 0.21863117870722434,
"grad_norm": 0.8618477879680877,
"learning_rate": 1.999553377944936e-05,
"loss": 0.4376,
"step": 115
},
{
"epoch": 0.22053231939163498,
"grad_norm": 0.934906724381784,
"learning_rate": 1.9994486243751076e-05,
"loss": 0.4647,
"step": 116
},
{
"epoch": 0.2224334600760456,
"grad_norm": 0.8754752708154347,
"learning_rate": 1.9993328483702393e-05,
"loss": 0.4551,
"step": 117
},
{
"epoch": 0.22433460076045628,
"grad_norm": 0.8767660694306663,
"learning_rate": 1.999206051207169e-05,
"loss": 0.4636,
"step": 118
},
{
"epoch": 0.2262357414448669,
"grad_norm": 0.9093985597739513,
"learning_rate": 1.9990682342842805e-05,
"loss": 0.4704,
"step": 119
},
{
"epoch": 0.22813688212927757,
"grad_norm": 0.8274217130804313,
"learning_rate": 1.99891939912149e-05,
"loss": 0.4582,
"step": 120
},
{
"epoch": 0.2300380228136882,
"grad_norm": 0.8995562281824847,
"learning_rate": 1.9987595473602292e-05,
"loss": 0.491,
"step": 121
},
{
"epoch": 0.23193916349809887,
"grad_norm": 0.847940069269653,
"learning_rate": 1.9985886807634246e-05,
"loss": 0.4906,
"step": 122
},
{
"epoch": 0.2338403041825095,
"grad_norm": 0.8330416368476238,
"learning_rate": 1.9984068012154824e-05,
"loss": 0.453,
"step": 123
},
{
"epoch": 0.23574144486692014,
"grad_norm": 0.8678412612602343,
"learning_rate": 1.9982139107222634e-05,
"loss": 0.4625,
"step": 124
},
{
"epoch": 0.2376425855513308,
"grad_norm": 0.8845935158738598,
"learning_rate": 1.9980100114110637e-05,
"loss": 0.4483,
"step": 125
},
{
"epoch": 0.23954372623574144,
"grad_norm": 0.9512453201517795,
"learning_rate": 1.99779510553059e-05,
"loss": 0.4597,
"step": 126
},
{
"epoch": 0.2414448669201521,
"grad_norm": 0.8531601006542403,
"learning_rate": 1.9975691954509347e-05,
"loss": 0.4843,
"step": 127
},
{
"epoch": 0.24334600760456274,
"grad_norm": 0.8598347823275206,
"learning_rate": 1.9973322836635517e-05,
"loss": 0.4664,
"step": 128
},
{
"epoch": 0.24524714828897337,
"grad_norm": 0.9475582922748836,
"learning_rate": 1.997084372781226e-05,
"loss": 0.4937,
"step": 129
},
{
"epoch": 0.24714828897338403,
"grad_norm": 0.8681962802006601,
"learning_rate": 1.9968254655380465e-05,
"loss": 0.4615,
"step": 130
},
{
"epoch": 0.24904942965779467,
"grad_norm": 0.8811510821257919,
"learning_rate": 1.996555564789376e-05,
"loss": 0.4663,
"step": 131
},
{
"epoch": 0.2509505703422053,
"grad_norm": 0.8008012975842812,
"learning_rate": 1.996274673511819e-05,
"loss": 0.4286,
"step": 132
},
{
"epoch": 0.25285171102661597,
"grad_norm": 0.8480627790754445,
"learning_rate": 1.99598279480319e-05,
"loss": 0.4529,
"step": 133
},
{
"epoch": 0.25475285171102663,
"grad_norm": 0.7834029612852157,
"learning_rate": 1.9956799318824776e-05,
"loss": 0.434,
"step": 134
},
{
"epoch": 0.25665399239543724,
"grad_norm": 1.131637150610238,
"learning_rate": 1.99536608808981e-05,
"loss": 0.4796,
"step": 135
},
{
"epoch": 0.2585551330798479,
"grad_norm": 0.8577839734466526,
"learning_rate": 1.995041266886419e-05,
"loss": 0.452,
"step": 136
},
{
"epoch": 0.26045627376425856,
"grad_norm": 0.9036864985233956,
"learning_rate": 1.9947054718545996e-05,
"loss": 0.4659,
"step": 137
},
{
"epoch": 0.2623574144486692,
"grad_norm": 0.8464994448662844,
"learning_rate": 1.994358706697674e-05,
"loss": 0.4752,
"step": 138
},
{
"epoch": 0.26425855513307983,
"grad_norm": 0.8240336025630546,
"learning_rate": 1.9940009752399462e-05,
"loss": 0.4649,
"step": 139
},
{
"epoch": 0.2661596958174905,
"grad_norm": 0.9737309894750616,
"learning_rate": 1.9936322814266634e-05,
"loss": 0.4777,
"step": 140
},
{
"epoch": 0.26806083650190116,
"grad_norm": 0.8887210107003006,
"learning_rate": 1.9932526293239713e-05,
"loss": 0.4664,
"step": 141
},
{
"epoch": 0.26996197718631176,
"grad_norm": 0.768650213630635,
"learning_rate": 1.9928620231188694e-05,
"loss": 0.445,
"step": 142
},
{
"epoch": 0.2718631178707224,
"grad_norm": 0.9275235686979297,
"learning_rate": 1.992460467119164e-05,
"loss": 0.4627,
"step": 143
},
{
"epoch": 0.2737642585551331,
"grad_norm": 0.8821031881591208,
"learning_rate": 1.992047965753422e-05,
"loss": 0.4827,
"step": 144
},
{
"epoch": 0.27566539923954375,
"grad_norm": 0.9165135617077241,
"learning_rate": 1.991624523570922e-05,
"loss": 0.4819,
"step": 145
},
{
"epoch": 0.27756653992395436,
"grad_norm": 0.8376809662999528,
"learning_rate": 1.9911901452416012e-05,
"loss": 0.4508,
"step": 146
},
{
"epoch": 0.279467680608365,
"grad_norm": 0.8522232600069493,
"learning_rate": 1.9907448355560094e-05,
"loss": 0.4482,
"step": 147
},
{
"epoch": 0.2813688212927757,
"grad_norm": 0.9314138147483847,
"learning_rate": 1.9902885994252506e-05,
"loss": 0.4592,
"step": 148
},
{
"epoch": 0.2832699619771863,
"grad_norm": 0.9082681969766799,
"learning_rate": 1.989821441880933e-05,
"loss": 0.471,
"step": 149
},
{
"epoch": 0.28517110266159695,
"grad_norm": 0.8308764161071152,
"learning_rate": 1.9893433680751105e-05,
"loss": 0.4557,
"step": 150
},
{
"epoch": 0.2870722433460076,
"grad_norm": 0.8486466915730397,
"learning_rate": 1.9888543832802277e-05,
"loss": 0.432,
"step": 151
},
{
"epoch": 0.2889733840304182,
"grad_norm": 0.9848314321372703,
"learning_rate": 1.9883544928890612e-05,
"loss": 0.4665,
"step": 152
},
{
"epoch": 0.2908745247148289,
"grad_norm": 0.9765901273057536,
"learning_rate": 1.9878437024146603e-05,
"loss": 0.4906,
"step": 153
},
{
"epoch": 0.29277566539923955,
"grad_norm": 0.8769148502173149,
"learning_rate": 1.9873220174902857e-05,
"loss": 0.4571,
"step": 154
},
{
"epoch": 0.2946768060836502,
"grad_norm": 0.8075927358345226,
"learning_rate": 1.986789443869348e-05,
"loss": 0.4592,
"step": 155
},
{
"epoch": 0.2965779467680608,
"grad_norm": 0.8228747069636811,
"learning_rate": 1.9862459874253438e-05,
"loss": 0.4414,
"step": 156
},
{
"epoch": 0.2984790874524715,
"grad_norm": 0.8010782353106709,
"learning_rate": 1.985691654151791e-05,
"loss": 0.438,
"step": 157
},
{
"epoch": 0.30038022813688214,
"grad_norm": 0.7849971890062005,
"learning_rate": 1.9851264501621635e-05,
"loss": 0.4708,
"step": 158
},
{
"epoch": 0.30228136882129275,
"grad_norm": 0.8507088067437428,
"learning_rate": 1.984550381689822e-05,
"loss": 0.4644,
"step": 159
},
{
"epoch": 0.3041825095057034,
"grad_norm": 0.8031811205074328,
"learning_rate": 1.983963455087946e-05,
"loss": 0.4629,
"step": 160
},
{
"epoch": 0.3060836501901141,
"grad_norm": 0.9359173128075622,
"learning_rate": 1.983365676829466e-05,
"loss": 0.4667,
"step": 161
},
{
"epoch": 0.30798479087452474,
"grad_norm": 0.831137409115297,
"learning_rate": 1.982757053506989e-05,
"loss": 0.4291,
"step": 162
},
{
"epoch": 0.30988593155893535,
"grad_norm": 0.8338930521144545,
"learning_rate": 1.9821375918327268e-05,
"loss": 0.4647,
"step": 163
},
{
"epoch": 0.311787072243346,
"grad_norm": 0.8542016873894958,
"learning_rate": 1.981507298638422e-05,
"loss": 0.4318,
"step": 164
},
{
"epoch": 0.31368821292775667,
"grad_norm": 0.9006370587129156,
"learning_rate": 1.9808661808752735e-05,
"loss": 0.458,
"step": 165
},
{
"epoch": 0.3155893536121673,
"grad_norm": 0.8452573826318911,
"learning_rate": 1.980214245613858e-05,
"loss": 0.4587,
"step": 166
},
{
"epoch": 0.31749049429657794,
"grad_norm": 1.0096737059877634,
"learning_rate": 1.979551500044055e-05,
"loss": 0.4589,
"step": 167
},
{
"epoch": 0.3193916349809886,
"grad_norm": 0.8054769921911692,
"learning_rate": 1.9788779514749635e-05,
"loss": 0.4687,
"step": 168
},
{
"epoch": 0.32129277566539927,
"grad_norm": 1.0846377921302122,
"learning_rate": 1.978193607334826e-05,
"loss": 0.4439,
"step": 169
},
{
"epoch": 0.3231939163498099,
"grad_norm": 0.7537185796729853,
"learning_rate": 1.977498475170941e-05,
"loss": 0.4674,
"step": 170
},
{
"epoch": 0.32509505703422054,
"grad_norm": 0.8931424459296898,
"learning_rate": 1.9767925626495857e-05,
"loss": 0.461,
"step": 171
},
{
"epoch": 0.3269961977186312,
"grad_norm": 0.9707273512130514,
"learning_rate": 1.9760758775559275e-05,
"loss": 0.4522,
"step": 172
},
{
"epoch": 0.3288973384030418,
"grad_norm": 1.0389394429473147,
"learning_rate": 1.975348427793939e-05,
"loss": 0.4609,
"step": 173
},
{
"epoch": 0.33079847908745247,
"grad_norm": 0.7893310729870803,
"learning_rate": 1.9746102213863113e-05,
"loss": 0.4463,
"step": 174
},
{
"epoch": 0.33269961977186313,
"grad_norm": 0.7743308077024001,
"learning_rate": 1.973861266474366e-05,
"loss": 0.454,
"step": 175
},
{
"epoch": 0.33460076045627374,
"grad_norm": 0.8474627379114403,
"learning_rate": 1.9731015713179643e-05,
"loss": 0.4601,
"step": 176
},
{
"epoch": 0.3365019011406844,
"grad_norm": 0.8653422487339897,
"learning_rate": 1.9723311442954163e-05,
"loss": 0.4479,
"step": 177
},
{
"epoch": 0.33840304182509506,
"grad_norm": 0.7898035918721892,
"learning_rate": 1.9715499939033883e-05,
"loss": 0.4426,
"step": 178
},
{
"epoch": 0.3403041825095057,
"grad_norm": 0.8094334376099781,
"learning_rate": 1.9707581287568094e-05,
"loss": 0.4289,
"step": 179
},
{
"epoch": 0.34220532319391633,
"grad_norm": 0.8040851639431348,
"learning_rate": 1.969955557588778e-05,
"loss": 0.4343,
"step": 180
},
{
"epoch": 0.344106463878327,
"grad_norm": 0.7896818878599305,
"learning_rate": 1.9691422892504626e-05,
"loss": 0.4336,
"step": 181
},
{
"epoch": 0.34600760456273766,
"grad_norm": 0.7877785701625993,
"learning_rate": 1.968318332711006e-05,
"loss": 0.4749,
"step": 182
},
{
"epoch": 0.34790874524714827,
"grad_norm": 0.8789551469584153,
"learning_rate": 1.9674836970574253e-05,
"loss": 0.4715,
"step": 183
},
{
"epoch": 0.34980988593155893,
"grad_norm": 0.875123941946867,
"learning_rate": 1.966638391494514e-05,
"loss": 0.4605,
"step": 184
},
{
"epoch": 0.3517110266159696,
"grad_norm": 0.878958252634536,
"learning_rate": 1.9657824253447378e-05,
"loss": 0.4424,
"step": 185
},
{
"epoch": 0.35361216730038025,
"grad_norm": 0.8084688137933784,
"learning_rate": 1.9649158080481327e-05,
"loss": 0.4737,
"step": 186
},
{
"epoch": 0.35551330798479086,
"grad_norm": 0.8576206949273462,
"learning_rate": 1.964038549162201e-05,
"loss": 0.4347,
"step": 187
},
{
"epoch": 0.3574144486692015,
"grad_norm": 0.8806038381954029,
"learning_rate": 1.963150658361807e-05,
"loss": 0.4439,
"step": 188
},
{
"epoch": 0.3593155893536122,
"grad_norm": 0.8416039365377381,
"learning_rate": 1.962252145439068e-05,
"loss": 0.4253,
"step": 189
},
{
"epoch": 0.3612167300380228,
"grad_norm": 0.9004717760465282,
"learning_rate": 1.9613430203032486e-05,
"loss": 0.443,
"step": 190
},
{
"epoch": 0.36311787072243346,
"grad_norm": 0.8944272792283807,
"learning_rate": 1.9604232929806493e-05,
"loss": 0.4468,
"step": 191
},
{
"epoch": 0.3650190114068441,
"grad_norm": 0.8634684417543759,
"learning_rate": 1.9594929736144978e-05,
"loss": 0.4609,
"step": 192
},
{
"epoch": 0.3669201520912547,
"grad_norm": 0.9878892691848816,
"learning_rate": 1.9585520724648354e-05,
"loss": 0.4466,
"step": 193
},
{
"epoch": 0.3688212927756654,
"grad_norm": 0.8282966702396417,
"learning_rate": 1.957600599908406e-05,
"loss": 0.4536,
"step": 194
},
{
"epoch": 0.37072243346007605,
"grad_norm": 1.1257991612799596,
"learning_rate": 1.95663856643854e-05,
"loss": 0.4469,
"step": 195
},
{
"epoch": 0.3726235741444867,
"grad_norm": 0.9811419616554969,
"learning_rate": 1.955665982665038e-05,
"loss": 0.4443,
"step": 196
},
{
"epoch": 0.3745247148288973,
"grad_norm": 1.0541914523546327,
"learning_rate": 1.9546828593140565e-05,
"loss": 0.433,
"step": 197
},
{
"epoch": 0.376425855513308,
"grad_norm": 1.0043064594496482,
"learning_rate": 1.9536892072279863e-05,
"loss": 0.4651,
"step": 198
},
{
"epoch": 0.37832699619771865,
"grad_norm": 1.19615264001806,
"learning_rate": 1.9526850373653356e-05,
"loss": 0.4722,
"step": 199
},
{
"epoch": 0.38022813688212925,
"grad_norm": 0.8993937281607631,
"learning_rate": 1.9516703608006074e-05,
"loss": 0.4124,
"step": 200
},
{
"epoch": 0.3821292775665399,
"grad_norm": 1.004675462119528,
"learning_rate": 1.9506451887241787e-05,
"loss": 0.4557,
"step": 201
},
{
"epoch": 0.3840304182509506,
"grad_norm": 1.1121635658668072,
"learning_rate": 1.949609532442176e-05,
"loss": 0.46,
"step": 202
},
{
"epoch": 0.38593155893536124,
"grad_norm": 0.878418773872806,
"learning_rate": 1.9485634033763507e-05,
"loss": 0.4309,
"step": 203
},
{
"epoch": 0.38783269961977185,
"grad_norm": 0.9326433734448492,
"learning_rate": 1.9475068130639543e-05,
"loss": 0.4325,
"step": 204
},
{
"epoch": 0.3897338403041825,
"grad_norm": 0.8779590481554084,
"learning_rate": 1.9464397731576093e-05,
"loss": 0.4342,
"step": 205
},
{
"epoch": 0.3916349809885932,
"grad_norm": 0.8556758224274593,
"learning_rate": 1.945362295425183e-05,
"loss": 0.4469,
"step": 206
},
{
"epoch": 0.3935361216730038,
"grad_norm": 0.8129250389806647,
"learning_rate": 1.944274391749655e-05,
"loss": 0.4404,
"step": 207
},
{
"epoch": 0.39543726235741444,
"grad_norm": 0.8309252609227962,
"learning_rate": 1.9431760741289886e-05,
"loss": 0.4349,
"step": 208
},
{
"epoch": 0.3973384030418251,
"grad_norm": 0.9615251211246022,
"learning_rate": 1.942067354675997e-05,
"loss": 0.4534,
"step": 209
},
{
"epoch": 0.39923954372623577,
"grad_norm": 0.89207989675763,
"learning_rate": 1.9409482456182105e-05,
"loss": 0.4403,
"step": 210
},
{
"epoch": 0.4011406844106464,
"grad_norm": 0.8371073682440002,
"learning_rate": 1.939818759297741e-05,
"loss": 0.4405,
"step": 211
},
{
"epoch": 0.40304182509505704,
"grad_norm": 0.9541336097820109,
"learning_rate": 1.9386789081711465e-05,
"loss": 0.4608,
"step": 212
},
{
"epoch": 0.4049429657794677,
"grad_norm": 0.8605065514586155,
"learning_rate": 1.9375287048092927e-05,
"loss": 0.4553,
"step": 213
},
{
"epoch": 0.4068441064638783,
"grad_norm": 1.0343041712835617,
"learning_rate": 1.9363681618972166e-05,
"loss": 0.4456,
"step": 214
},
{
"epoch": 0.40874524714828897,
"grad_norm": 0.8778652173555826,
"learning_rate": 1.9351972922339835e-05,
"loss": 0.4613,
"step": 215
},
{
"epoch": 0.41064638783269963,
"grad_norm": 1.0133007139700836,
"learning_rate": 1.9340161087325483e-05,
"loss": 0.4532,
"step": 216
},
{
"epoch": 0.41254752851711024,
"grad_norm": 0.7817994216143113,
"learning_rate": 1.9328246244196117e-05,
"loss": 0.4411,
"step": 217
},
{
"epoch": 0.4144486692015209,
"grad_norm": 0.8370023534671679,
"learning_rate": 1.931622852435478e-05,
"loss": 0.4477,
"step": 218
},
{
"epoch": 0.41634980988593157,
"grad_norm": 0.7597652688727878,
"learning_rate": 1.930410806033908e-05,
"loss": 0.4337,
"step": 219
},
{
"epoch": 0.41825095057034223,
"grad_norm": 0.8562135186347026,
"learning_rate": 1.929188498581975e-05,
"loss": 0.4376,
"step": 220
},
{
"epoch": 0.42015209125475284,
"grad_norm": 0.8360631984854724,
"learning_rate": 1.9279559435599164e-05,
"loss": 0.464,
"step": 221
},
{
"epoch": 0.4220532319391635,
"grad_norm": 0.8565276872807861,
"learning_rate": 1.926713154560984e-05,
"loss": 0.4504,
"step": 222
},
{
"epoch": 0.42395437262357416,
"grad_norm": 0.8522362311756184,
"learning_rate": 1.9254601452912972e-05,
"loss": 0.478,
"step": 223
},
{
"epoch": 0.42585551330798477,
"grad_norm": 0.795452922029974,
"learning_rate": 1.924196929569688e-05,
"loss": 0.4472,
"step": 224
},
{
"epoch": 0.42775665399239543,
"grad_norm": 0.7886447733196899,
"learning_rate": 1.922923521327551e-05,
"loss": 0.4593,
"step": 225
},
{
"epoch": 0.4296577946768061,
"grad_norm": 0.7870022901706833,
"learning_rate": 1.9216399346086893e-05,
"loss": 0.4524,
"step": 226
},
{
"epoch": 0.43155893536121676,
"grad_norm": 0.8207650602959417,
"learning_rate": 1.9203461835691596e-05,
"loss": 0.4539,
"step": 227
},
{
"epoch": 0.43346007604562736,
"grad_norm": 0.8443366944742314,
"learning_rate": 1.9190422824771158e-05,
"loss": 0.4303,
"step": 228
},
{
"epoch": 0.435361216730038,
"grad_norm": 0.7333838515847383,
"learning_rate": 1.9177282457126515e-05,
"loss": 0.4212,
"step": 229
},
{
"epoch": 0.4372623574144487,
"grad_norm": 0.8440185257112748,
"learning_rate": 1.9164040877676425e-05,
"loss": 0.4313,
"step": 230
},
{
"epoch": 0.4391634980988593,
"grad_norm": 0.7680695676038496,
"learning_rate": 1.9150698232455853e-05,
"loss": 0.4098,
"step": 231
},
{
"epoch": 0.44106463878326996,
"grad_norm": 0.8129667213013306,
"learning_rate": 1.913725466861438e-05,
"loss": 0.4205,
"step": 232
},
{
"epoch": 0.4429657794676806,
"grad_norm": 0.8872933719623889,
"learning_rate": 1.9123710334414552e-05,
"loss": 0.4495,
"step": 233
},
{
"epoch": 0.4448669201520912,
"grad_norm": 0.7958472474093201,
"learning_rate": 1.911006537923029e-05,
"loss": 0.4338,
"step": 234
},
{
"epoch": 0.4467680608365019,
"grad_norm": 0.9872490022897036,
"learning_rate": 1.9096319953545186e-05,
"loss": 0.4301,
"step": 235
},
{
"epoch": 0.44866920152091255,
"grad_norm": 0.8123818550664913,
"learning_rate": 1.908247420895089e-05,
"loss": 0.4397,
"step": 236
},
{
"epoch": 0.4505703422053232,
"grad_norm": 0.9563022645068926,
"learning_rate": 1.9068528298145418e-05,
"loss": 0.4346,
"step": 237
},
{
"epoch": 0.4524714828897338,
"grad_norm": 0.7388576600369232,
"learning_rate": 1.905448237493147e-05,
"loss": 0.4459,
"step": 238
},
{
"epoch": 0.4543726235741445,
"grad_norm": 0.7588215832834538,
"learning_rate": 1.9040336594214727e-05,
"loss": 0.4268,
"step": 239
},
{
"epoch": 0.45627376425855515,
"grad_norm": 0.7319652290070489,
"learning_rate": 1.9026091112002163e-05,
"loss": 0.4248,
"step": 240
},
{
"epoch": 0.45817490494296575,
"grad_norm": 0.7445575159437425,
"learning_rate": 1.90117460854003e-05,
"loss": 0.438,
"step": 241
},
{
"epoch": 0.4600760456273764,
"grad_norm": 0.8723292501284733,
"learning_rate": 1.8997301672613496e-05,
"loss": 0.4661,
"step": 242
},
{
"epoch": 0.4619771863117871,
"grad_norm": 0.7103955415797374,
"learning_rate": 1.8982758032942184e-05,
"loss": 0.4591,
"step": 243
},
{
"epoch": 0.46387832699619774,
"grad_norm": 0.815109385381162,
"learning_rate": 1.896811532678113e-05,
"loss": 0.4602,
"step": 244
},
{
"epoch": 0.46577946768060835,
"grad_norm": 0.8418384331736157,
"learning_rate": 1.8953373715617646e-05,
"loss": 0.4458,
"step": 245
},
{
"epoch": 0.467680608365019,
"grad_norm": 0.7123447212151948,
"learning_rate": 1.893853336202983e-05,
"loss": 0.4135,
"step": 246
},
{
"epoch": 0.4695817490494297,
"grad_norm": 0.8734460247819267,
"learning_rate": 1.892359442968475e-05,
"loss": 0.4224,
"step": 247
},
{
"epoch": 0.4714828897338403,
"grad_norm": 0.7783885285542095,
"learning_rate": 1.8908557083336668e-05,
"loss": 0.4228,
"step": 248
},
{
"epoch": 0.47338403041825095,
"grad_norm": 0.8735113308212828,
"learning_rate": 1.889342148882519e-05,
"loss": 0.4264,
"step": 249
},
{
"epoch": 0.4752851711026616,
"grad_norm": 0.8519446730088098,
"learning_rate": 1.8878187813073465e-05,
"loss": 0.4521,
"step": 250
},
{
"epoch": 0.47718631178707227,
"grad_norm": 0.6943717970432743,
"learning_rate": 1.886285622408633e-05,
"loss": 0.4249,
"step": 251
},
{
"epoch": 0.4790874524714829,
"grad_norm": 0.7800200238003078,
"learning_rate": 1.8847426890948447e-05,
"loss": 0.4336,
"step": 252
},
{
"epoch": 0.48098859315589354,
"grad_norm": 0.9656272884464944,
"learning_rate": 1.8831899983822475e-05,
"loss": 0.437,
"step": 253
},
{
"epoch": 0.4828897338403042,
"grad_norm": 0.7318585554046652,
"learning_rate": 1.8816275673947148e-05,
"loss": 0.4251,
"step": 254
},
{
"epoch": 0.4847908745247148,
"grad_norm": 1.013600137863201,
"learning_rate": 1.8800554133635417e-05,
"loss": 0.4146,
"step": 255
},
{
"epoch": 0.4866920152091255,
"grad_norm": 0.753022725709431,
"learning_rate": 1.8784735536272543e-05,
"loss": 0.4215,
"step": 256
},
{
"epoch": 0.48859315589353614,
"grad_norm": 0.8925869815866877,
"learning_rate": 1.8768820056314173e-05,
"loss": 0.425,
"step": 257
},
{
"epoch": 0.49049429657794674,
"grad_norm": 0.8986147493018323,
"learning_rate": 1.875280786928444e-05,
"loss": 0.4329,
"step": 258
},
{
"epoch": 0.4923954372623574,
"grad_norm": 0.7788484598809857,
"learning_rate": 1.873669915177399e-05,
"loss": 0.446,
"step": 259
},
{
"epoch": 0.49429657794676807,
"grad_norm": 0.906196292758039,
"learning_rate": 1.872049408143808e-05,
"loss": 0.4133,
"step": 260
},
{
"epoch": 0.49619771863117873,
"grad_norm": 0.7762932392430605,
"learning_rate": 1.8704192836994578e-05,
"loss": 0.423,
"step": 261
},
{
"epoch": 0.49809885931558934,
"grad_norm": 0.8223494401885655,
"learning_rate": 1.8687795598222024e-05,
"loss": 0.4545,
"step": 262
},
{
"epoch": 0.5,
"grad_norm": 0.6787722949179172,
"learning_rate": 1.8671302545957628e-05,
"loss": 0.4351,
"step": 263
},
{
"epoch": 0.5019011406844106,
"grad_norm": 0.745054332597767,
"learning_rate": 1.8654713862095272e-05,
"loss": 0.438,
"step": 264
},
{
"epoch": 0.5038022813688213,
"grad_norm": 0.8013101618305613,
"learning_rate": 1.8638029729583524e-05,
"loss": 0.4358,
"step": 265
},
{
"epoch": 0.5057034220532319,
"grad_norm": 0.7214827807068841,
"learning_rate": 1.8621250332423603e-05,
"loss": 0.4377,
"step": 266
},
{
"epoch": 0.5076045627376425,
"grad_norm": 0.8351125262363144,
"learning_rate": 1.860437585566736e-05,
"loss": 0.4324,
"step": 267
},
{
"epoch": 0.5095057034220533,
"grad_norm": 0.7276113239090198,
"learning_rate": 1.8587406485415226e-05,
"loss": 0.4195,
"step": 268
},
{
"epoch": 0.5114068441064639,
"grad_norm": 0.7491877236146232,
"learning_rate": 1.8570342408814173e-05,
"loss": 0.4301,
"step": 269
},
{
"epoch": 0.5133079847908745,
"grad_norm": 0.7498742635052652,
"learning_rate": 1.855318381405564e-05,
"loss": 0.4671,
"step": 270
},
{
"epoch": 0.5152091254752852,
"grad_norm": 0.7857756474376003,
"learning_rate": 1.8535930890373467e-05,
"loss": 0.4356,
"step": 271
},
{
"epoch": 0.5171102661596958,
"grad_norm": 0.7102076283113054,
"learning_rate": 1.8518583828041787e-05,
"loss": 0.4399,
"step": 272
},
{
"epoch": 0.5190114068441065,
"grad_norm": 0.816685316550917,
"learning_rate": 1.8501142818372964e-05,
"loss": 0.4374,
"step": 273
},
{
"epoch": 0.5209125475285171,
"grad_norm": 0.7606632577787334,
"learning_rate": 1.848360805371544e-05,
"loss": 0.4504,
"step": 274
},
{
"epoch": 0.5228136882129277,
"grad_norm": 0.7668609125442392,
"learning_rate": 1.8465979727451653e-05,
"loss": 0.4333,
"step": 275
},
{
"epoch": 0.5247148288973384,
"grad_norm": 0.8094314903167398,
"learning_rate": 1.8448258033995877e-05,
"loss": 0.4406,
"step": 276
},
{
"epoch": 0.526615969581749,
"grad_norm": 0.849042409522864,
"learning_rate": 1.8430443168792087e-05,
"loss": 0.4201,
"step": 277
},
{
"epoch": 0.5285171102661597,
"grad_norm": 0.7629134404140965,
"learning_rate": 1.8412535328311813e-05,
"loss": 0.4356,
"step": 278
},
{
"epoch": 0.5304182509505704,
"grad_norm": 0.8492914008244423,
"learning_rate": 1.8394534710051956e-05,
"loss": 0.4517,
"step": 279
},
{
"epoch": 0.532319391634981,
"grad_norm": 0.7907191565848902,
"learning_rate": 1.8376441512532617e-05,
"loss": 0.4357,
"step": 280
},
{
"epoch": 0.5342205323193916,
"grad_norm": 0.6329100012990677,
"learning_rate": 1.835825593529492e-05,
"loss": 0.4152,
"step": 281
},
{
"epoch": 0.5361216730038023,
"grad_norm": 0.8225746219074328,
"learning_rate": 1.833997817889878e-05,
"loss": 0.4427,
"step": 282
},
{
"epoch": 0.5380228136882129,
"grad_norm": 0.6736007300339728,
"learning_rate": 1.8321608444920738e-05,
"loss": 0.4063,
"step": 283
},
{
"epoch": 0.5399239543726235,
"grad_norm": 0.7132578713308504,
"learning_rate": 1.830314693595169e-05,
"loss": 0.4197,
"step": 284
},
{
"epoch": 0.5418250950570342,
"grad_norm": 0.7714985661491659,
"learning_rate": 1.828459385559468e-05,
"loss": 0.4224,
"step": 285
},
{
"epoch": 0.5437262357414449,
"grad_norm": 0.8598173759118742,
"learning_rate": 1.8265949408462657e-05,
"loss": 0.4253,
"step": 286
},
{
"epoch": 0.5456273764258555,
"grad_norm": 0.7029283482151572,
"learning_rate": 1.8247213800176192e-05,
"loss": 0.4255,
"step": 287
},
{
"epoch": 0.5475285171102662,
"grad_norm": 0.6849783730109497,
"learning_rate": 1.8228387237361245e-05,
"loss": 0.4332,
"step": 288
},
{
"epoch": 0.5494296577946768,
"grad_norm": 0.7834167436819741,
"learning_rate": 1.8209469927646863e-05,
"loss": 0.4144,
"step": 289
},
{
"epoch": 0.5513307984790875,
"grad_norm": 0.6975582999097005,
"learning_rate": 1.8190462079662897e-05,
"loss": 0.4486,
"step": 290
},
{
"epoch": 0.5532319391634981,
"grad_norm": 0.7665448183443585,
"learning_rate": 1.81713639030377e-05,
"loss": 0.4341,
"step": 291
},
{
"epoch": 0.5551330798479087,
"grad_norm": 0.7866244190588035,
"learning_rate": 1.8152175608395814e-05,
"loss": 0.4215,
"step": 292
},
{
"epoch": 0.5570342205323194,
"grad_norm": 0.7583982024477378,
"learning_rate": 1.8132897407355657e-05,
"loss": 0.4377,
"step": 293
},
{
"epoch": 0.55893536121673,
"grad_norm": 0.7076965800561023,
"learning_rate": 1.811352951252717e-05,
"loss": 0.4157,
"step": 294
},
{
"epoch": 0.5608365019011406,
"grad_norm": 0.6987567565339486,
"learning_rate": 1.809407213750949e-05,
"loss": 0.4264,
"step": 295
},
{
"epoch": 0.5627376425855514,
"grad_norm": 0.6856839925038805,
"learning_rate": 1.807452549688859e-05,
"loss": 0.4347,
"step": 296
},
{
"epoch": 0.564638783269962,
"grad_norm": 0.820632914900802,
"learning_rate": 1.8054889806234906e-05,
"loss": 0.4581,
"step": 297
},
{
"epoch": 0.5665399239543726,
"grad_norm": 0.7821303762318998,
"learning_rate": 1.8035165282100963e-05,
"loss": 0.4274,
"step": 298
},
{
"epoch": 0.5684410646387833,
"grad_norm": 0.694299740097921,
"learning_rate": 1.8015352142018984e-05,
"loss": 0.4278,
"step": 299
},
{
"epoch": 0.5703422053231939,
"grad_norm": 0.9836910835008542,
"learning_rate": 1.799545060449851e-05,
"loss": 0.4143,
"step": 300
},
{
"epoch": 0.5722433460076045,
"grad_norm": 0.7187139877910586,
"learning_rate": 1.797546088902396e-05,
"loss": 0.4228,
"step": 301
},
{
"epoch": 0.5741444866920152,
"grad_norm": 0.7985662685301071,
"learning_rate": 1.7955383216052224e-05,
"loss": 0.4285,
"step": 302
},
{
"epoch": 0.5760456273764258,
"grad_norm": 0.8852086694475297,
"learning_rate": 1.7935217807010238e-05,
"loss": 0.4306,
"step": 303
},
{
"epoch": 0.5779467680608364,
"grad_norm": 0.7367084762695927,
"learning_rate": 1.7914964884292543e-05,
"loss": 0.4332,
"step": 304
},
{
"epoch": 0.5798479087452472,
"grad_norm": 0.7984884106074497,
"learning_rate": 1.7894624671258813e-05,
"loss": 0.4313,
"step": 305
},
{
"epoch": 0.5817490494296578,
"grad_norm": 0.7750068184675216,
"learning_rate": 1.7874197392231414e-05,
"loss": 0.4263,
"step": 306
},
{
"epoch": 0.5836501901140685,
"grad_norm": 0.758307993341141,
"learning_rate": 1.7853683272492913e-05,
"loss": 0.4362,
"step": 307
},
{
"epoch": 0.5855513307984791,
"grad_norm": 0.8897761756410105,
"learning_rate": 1.7833082538283615e-05,
"loss": 0.4166,
"step": 308
},
{
"epoch": 0.5874524714828897,
"grad_norm": 0.7383739858389016,
"learning_rate": 1.7812395416799034e-05,
"loss": 0.4134,
"step": 309
},
{
"epoch": 0.5893536121673004,
"grad_norm": 0.815149784793214,
"learning_rate": 1.7791622136187422e-05,
"loss": 0.4502,
"step": 310
},
{
"epoch": 0.591254752851711,
"grad_norm": 0.7323005872170358,
"learning_rate": 1.7770762925547235e-05,
"loss": 0.4452,
"step": 311
},
{
"epoch": 0.5931558935361216,
"grad_norm": 0.7811650758301157,
"learning_rate": 1.7749818014924612e-05,
"loss": 0.4264,
"step": 312
},
{
"epoch": 0.5950570342205324,
"grad_norm": 0.7155419614901966,
"learning_rate": 1.7728787635310828e-05,
"loss": 0.4155,
"step": 313
},
{
"epoch": 0.596958174904943,
"grad_norm": 1.1099640278211553,
"learning_rate": 1.770767201863976e-05,
"loss": 0.4304,
"step": 314
},
{
"epoch": 0.5988593155893536,
"grad_norm": 0.6874190429408628,
"learning_rate": 1.7686471397785322e-05,
"loss": 0.4106,
"step": 315
},
{
"epoch": 0.6007604562737643,
"grad_norm": 1.5953408915677911,
"learning_rate": 1.76651860065589e-05,
"loss": 0.4149,
"step": 316
},
{
"epoch": 0.6026615969581749,
"grad_norm": 0.7599276026088725,
"learning_rate": 1.764381607970677e-05,
"loss": 0.4373,
"step": 317
},
{
"epoch": 0.6045627376425855,
"grad_norm": 0.7398069061351941,
"learning_rate": 1.7622361852907506e-05,
"loss": 0.4259,
"step": 318
},
{
"epoch": 0.6064638783269962,
"grad_norm": 0.8020847528270457,
"learning_rate": 1.760082356276939e-05,
"loss": 0.4381,
"step": 319
},
{
"epoch": 0.6083650190114068,
"grad_norm": 0.7840114862344743,
"learning_rate": 1.75792014468278e-05,
"loss": 0.4205,
"step": 320
},
{
"epoch": 0.6102661596958175,
"grad_norm": 0.713869928577788,
"learning_rate": 1.7557495743542586e-05,
"loss": 0.442,
"step": 321
},
{
"epoch": 0.6121673003802282,
"grad_norm": 0.7144026256955816,
"learning_rate": 1.7535706692295436e-05,
"loss": 0.432,
"step": 322
},
{
"epoch": 0.6140684410646388,
"grad_norm": 0.7206599928049825,
"learning_rate": 1.7513834533387256e-05,
"loss": 0.4296,
"step": 323
},
{
"epoch": 0.6159695817490495,
"grad_norm": 0.7302802157915199,
"learning_rate": 1.7491879508035488e-05,
"loss": 0.4277,
"step": 324
},
{
"epoch": 0.6178707224334601,
"grad_norm": 0.7231497618281654,
"learning_rate": 1.746984185837149e-05,
"loss": 0.4255,
"step": 325
},
{
"epoch": 0.6197718631178707,
"grad_norm": 0.6876679026890539,
"learning_rate": 1.744772182743782e-05,
"loss": 0.4265,
"step": 326
},
{
"epoch": 0.6216730038022814,
"grad_norm": 0.8393840216228529,
"learning_rate": 1.7425519659185596e-05,
"loss": 0.4263,
"step": 327
},
{
"epoch": 0.623574144486692,
"grad_norm": 0.7461311723113427,
"learning_rate": 1.740323559847179e-05,
"loss": 0.4163,
"step": 328
},
{
"epoch": 0.6254752851711026,
"grad_norm": 0.7560535257527539,
"learning_rate": 1.738086989105651e-05,
"loss": 0.4188,
"step": 329
},
{
"epoch": 0.6273764258555133,
"grad_norm": 0.7272479940617572,
"learning_rate": 1.735842278360032e-05,
"loss": 0.4485,
"step": 330
},
{
"epoch": 0.629277566539924,
"grad_norm": 0.7953465841345556,
"learning_rate": 1.73358945236615e-05,
"loss": 0.4195,
"step": 331
},
{
"epoch": 0.6311787072243346,
"grad_norm": 0.6570823411597754,
"learning_rate": 1.7313285359693322e-05,
"loss": 0.429,
"step": 332
},
{
"epoch": 0.6330798479087453,
"grad_norm": 0.7142186773565881,
"learning_rate": 1.7290595541041312e-05,
"loss": 0.3941,
"step": 333
},
{
"epoch": 0.6349809885931559,
"grad_norm": 0.7815097039154676,
"learning_rate": 1.7267825317940494e-05,
"loss": 0.4462,
"step": 334
},
{
"epoch": 0.6368821292775665,
"grad_norm": 0.763566823082039,
"learning_rate": 1.724497494151264e-05,
"loss": 0.4081,
"step": 335
},
{
"epoch": 0.6387832699619772,
"grad_norm": 0.8160932822499316,
"learning_rate": 1.7222044663763484e-05,
"loss": 0.4154,
"step": 336
},
{
"epoch": 0.6406844106463878,
"grad_norm": 0.9054514936381787,
"learning_rate": 1.7199034737579962e-05,
"loss": 0.42,
"step": 337
},
{
"epoch": 0.6425855513307985,
"grad_norm": 0.7697241067621587,
"learning_rate": 1.7175945416727405e-05,
"loss": 0.4386,
"step": 338
},
{
"epoch": 0.6444866920152091,
"grad_norm": 0.7793897133325295,
"learning_rate": 1.7152776955846768e-05,
"loss": 0.4136,
"step": 339
},
{
"epoch": 0.6463878326996197,
"grad_norm": 0.9737250754183426,
"learning_rate": 1.7129529610451775e-05,
"loss": 0.4083,
"step": 340
},
{
"epoch": 0.6482889733840305,
"grad_norm": 0.7696341456400928,
"learning_rate": 1.7106203636926154e-05,
"loss": 0.4424,
"step": 341
},
{
"epoch": 0.6501901140684411,
"grad_norm": 0.9975105153413644,
"learning_rate": 1.7082799292520767e-05,
"loss": 0.4099,
"step": 342
},
{
"epoch": 0.6520912547528517,
"grad_norm": 0.7266944385871547,
"learning_rate": 1.7059316835350806e-05,
"loss": 0.4255,
"step": 343
},
{
"epoch": 0.6539923954372624,
"grad_norm": 0.759984352313296,
"learning_rate": 1.7035756524392924e-05,
"loss": 0.4193,
"step": 344
},
{
"epoch": 0.655893536121673,
"grad_norm": 0.856245907199728,
"learning_rate": 1.7012118619482376e-05,
"loss": 0.4193,
"step": 345
},
{
"epoch": 0.6577946768060836,
"grad_norm": 0.7118784207647573,
"learning_rate": 1.6988403381310177e-05,
"loss": 0.4413,
"step": 346
},
{
"epoch": 0.6596958174904943,
"grad_norm": 0.889563245579411,
"learning_rate": 1.696461107142021e-05,
"loss": 0.4277,
"step": 347
},
{
"epoch": 0.6615969581749049,
"grad_norm": 0.7381868896846077,
"learning_rate": 1.6940741952206342e-05,
"loss": 0.4141,
"step": 348
},
{
"epoch": 0.6634980988593155,
"grad_norm": 0.7405703208985123,
"learning_rate": 1.691679628690953e-05,
"loss": 0.4411,
"step": 349
},
{
"epoch": 0.6653992395437263,
"grad_norm": 0.8246611555171061,
"learning_rate": 1.6892774339614927e-05,
"loss": 0.4086,
"step": 350
},
{
"epoch": 0.6673003802281369,
"grad_norm": 0.8003304523840453,
"learning_rate": 1.686867637524896e-05,
"loss": 0.4358,
"step": 351
},
{
"epoch": 0.6692015209125475,
"grad_norm": 0.7435097561294612,
"learning_rate": 1.6844502659576414e-05,
"loss": 0.4249,
"step": 352
},
{
"epoch": 0.6711026615969582,
"grad_norm": 0.7424926295243242,
"learning_rate": 1.6820253459197493e-05,
"loss": 0.442,
"step": 353
},
{
"epoch": 0.6730038022813688,
"grad_norm": 0.8233183033151985,
"learning_rate": 1.679592904154489e-05,
"loss": 0.4358,
"step": 354
},
{
"epoch": 0.6749049429657795,
"grad_norm": 0.6988959113059067,
"learning_rate": 1.677152967488084e-05,
"loss": 0.4312,
"step": 355
},
{
"epoch": 0.6768060836501901,
"grad_norm": 0.8682734068350872,
"learning_rate": 1.6747055628294134e-05,
"loss": 0.429,
"step": 356
},
{
"epoch": 0.6787072243346007,
"grad_norm": 0.7043268373325795,
"learning_rate": 1.6722507171697184e-05,
"loss": 0.4319,
"step": 357
},
{
"epoch": 0.6806083650190115,
"grad_norm": 0.8250172635981263,
"learning_rate": 1.669788457582304e-05,
"loss": 0.4374,
"step": 358
},
{
"epoch": 0.6825095057034221,
"grad_norm": 0.8669956318215357,
"learning_rate": 1.6673188112222394e-05,
"loss": 0.4106,
"step": 359
},
{
"epoch": 0.6844106463878327,
"grad_norm": 0.8315710333418098,
"learning_rate": 1.6648418053260585e-05,
"loss": 0.4308,
"step": 360
},
{
"epoch": 0.6863117870722434,
"grad_norm": 0.8886513268089046,
"learning_rate": 1.6623574672114596e-05,
"loss": 0.4226,
"step": 361
},
{
"epoch": 0.688212927756654,
"grad_norm": 0.7090073519651046,
"learning_rate": 1.6598658242770054e-05,
"loss": 0.4491,
"step": 362
},
{
"epoch": 0.6901140684410646,
"grad_norm": 0.810591457398368,
"learning_rate": 1.6573669040018202e-05,
"loss": 0.4151,
"step": 363
},
{
"epoch": 0.6920152091254753,
"grad_norm": 0.6992188788001621,
"learning_rate": 1.6548607339452853e-05,
"loss": 0.4044,
"step": 364
},
{
"epoch": 0.6939163498098859,
"grad_norm": 0.6612507765788376,
"learning_rate": 1.652347341746737e-05,
"loss": 0.4346,
"step": 365
},
{
"epoch": 0.6958174904942965,
"grad_norm": 0.7269067842068467,
"learning_rate": 1.6498267551251618e-05,
"loss": 0.4144,
"step": 366
},
{
"epoch": 0.6977186311787072,
"grad_norm": 0.6923625939428302,
"learning_rate": 1.6472990018788884e-05,
"loss": 0.4039,
"step": 367
},
{
"epoch": 0.6996197718631179,
"grad_norm": 0.6960261081531017,
"learning_rate": 1.644764109885284e-05,
"loss": 0.4381,
"step": 368
},
{
"epoch": 0.7015209125475285,
"grad_norm": 0.7480791561472906,
"learning_rate": 1.642222107100446e-05,
"loss": 0.4372,
"step": 369
},
{
"epoch": 0.7034220532319392,
"grad_norm": 0.7316044484800576,
"learning_rate": 1.6396730215588913e-05,
"loss": 0.4118,
"step": 370
},
{
"epoch": 0.7053231939163498,
"grad_norm": 0.8200069901040676,
"learning_rate": 1.6371168813732514e-05,
"loss": 0.4135,
"step": 371
},
{
"epoch": 0.7072243346007605,
"grad_norm": 0.732249845862064,
"learning_rate": 1.6345537147339578e-05,
"loss": 0.409,
"step": 372
},
{
"epoch": 0.7091254752851711,
"grad_norm": 0.7246888380111897,
"learning_rate": 1.6319835499089358e-05,
"loss": 0.4017,
"step": 373
},
{
"epoch": 0.7110266159695817,
"grad_norm": 0.7780493957682008,
"learning_rate": 1.6294064152432878e-05,
"loss": 0.4069,
"step": 374
},
{
"epoch": 0.7129277566539924,
"grad_norm": 0.713020318087346,
"learning_rate": 1.626822339158985e-05,
"loss": 0.4069,
"step": 375
},
{
"epoch": 0.714828897338403,
"grad_norm": 0.8527059024757032,
"learning_rate": 1.6242313501545522e-05,
"loss": 0.426,
"step": 376
},
{
"epoch": 0.7167300380228137,
"grad_norm": 0.7466709016431567,
"learning_rate": 1.621633476804752e-05,
"loss": 0.4203,
"step": 377
},
{
"epoch": 0.7186311787072244,
"grad_norm": 0.8091464055071294,
"learning_rate": 1.6190287477602716e-05,
"loss": 0.4156,
"step": 378
},
{
"epoch": 0.720532319391635,
"grad_norm": 0.9367217697061448,
"learning_rate": 1.6164171917474078e-05,
"loss": 0.4136,
"step": 379
},
{
"epoch": 0.7224334600760456,
"grad_norm": 0.7038233854162687,
"learning_rate": 1.6137988375677466e-05,
"loss": 0.4353,
"step": 380
},
{
"epoch": 0.7243346007604563,
"grad_norm": 0.8253527824810887,
"learning_rate": 1.6111737140978495e-05,
"loss": 0.412,
"step": 381
},
{
"epoch": 0.7262357414448669,
"grad_norm": 0.7537977831173165,
"learning_rate": 1.6085418502889315e-05,
"loss": 0.4326,
"step": 382
},
{
"epoch": 0.7281368821292775,
"grad_norm": 0.820889289514122,
"learning_rate": 1.6059032751665454e-05,
"loss": 0.4444,
"step": 383
},
{
"epoch": 0.7300380228136882,
"grad_norm": 0.7769949157296688,
"learning_rate": 1.6032580178302585e-05,
"loss": 0.4532,
"step": 384
},
{
"epoch": 0.7319391634980988,
"grad_norm": 0.7283837430661269,
"learning_rate": 1.600606107453333e-05,
"loss": 0.4207,
"step": 385
},
{
"epoch": 0.7338403041825095,
"grad_norm": 0.6825497080781258,
"learning_rate": 1.597947573282405e-05,
"loss": 0.425,
"step": 386
},
{
"epoch": 0.7357414448669202,
"grad_norm": 0.7242546941234317,
"learning_rate": 1.5952824446371608e-05,
"loss": 0.4471,
"step": 387
},
{
"epoch": 0.7376425855513308,
"grad_norm": 0.695899597082016,
"learning_rate": 1.592610750910014e-05,
"loss": 0.4156,
"step": 388
},
{
"epoch": 0.7395437262357415,
"grad_norm": 0.6804528150308485,
"learning_rate": 1.589932521565781e-05,
"loss": 0.4071,
"step": 389
},
{
"epoch": 0.7414448669201521,
"grad_norm": 0.723304324761366,
"learning_rate": 1.587247786141358e-05,
"loss": 0.4285,
"step": 390
},
{
"epoch": 0.7433460076045627,
"grad_norm": 0.7173331581696021,
"learning_rate": 1.5845565742453906e-05,
"loss": 0.429,
"step": 391
},
{
"epoch": 0.7452471482889734,
"grad_norm": 0.6801564650308655,
"learning_rate": 1.581858915557953e-05,
"loss": 0.4238,
"step": 392
},
{
"epoch": 0.747148288973384,
"grad_norm": 0.7608848629913085,
"learning_rate": 1.5791548398302167e-05,
"loss": 0.4287,
"step": 393
},
{
"epoch": 0.7490494296577946,
"grad_norm": 0.6408678807865112,
"learning_rate": 1.5764443768841234e-05,
"loss": 0.3977,
"step": 394
},
{
"epoch": 0.7509505703422054,
"grad_norm": 0.8090992400396866,
"learning_rate": 1.5737275566120577e-05,
"loss": 0.4381,
"step": 395
},
{
"epoch": 0.752851711026616,
"grad_norm": 0.6559332878551333,
"learning_rate": 1.5710044089765144e-05,
"loss": 0.4289,
"step": 396
},
{
"epoch": 0.7547528517110266,
"grad_norm": 0.7357298033864544,
"learning_rate": 1.5682749640097708e-05,
"loss": 0.41,
"step": 397
},
{
"epoch": 0.7566539923954373,
"grad_norm": 0.6271455659020091,
"learning_rate": 1.565539251813554e-05,
"loss": 0.4003,
"step": 398
},
{
"epoch": 0.7585551330798479,
"grad_norm": 0.6769839481905983,
"learning_rate": 1.5627973025587093e-05,
"loss": 0.4271,
"step": 399
},
{
"epoch": 0.7604562737642585,
"grad_norm": 0.6572210141336534,
"learning_rate": 1.560049146484868e-05,
"loss": 0.4324,
"step": 400
},
{
"epoch": 0.7623574144486692,
"grad_norm": 0.6722761473001982,
"learning_rate": 1.5572948139001128e-05,
"loss": 0.4227,
"step": 401
},
{
"epoch": 0.7642585551330798,
"grad_norm": 0.6350284090251579,
"learning_rate": 1.5545343351806443e-05,
"loss": 0.4088,
"step": 402
},
{
"epoch": 0.7661596958174905,
"grad_norm": 0.6619289855162904,
"learning_rate": 1.551767740770446e-05,
"loss": 0.4035,
"step": 403
},
{
"epoch": 0.7680608365019012,
"grad_norm": 0.6490790330235795,
"learning_rate": 1.5489950611809484e-05,
"loss": 0.414,
"step": 404
},
{
"epoch": 0.7699619771863118,
"grad_norm": 0.6492888425925012,
"learning_rate": 1.5462163269906928e-05,
"loss": 0.4115,
"step": 405
},
{
"epoch": 0.7718631178707225,
"grad_norm": 0.6441884240500749,
"learning_rate": 1.5434315688449924e-05,
"loss": 0.4087,
"step": 406
},
{
"epoch": 0.7737642585551331,
"grad_norm": 0.6552785384684677,
"learning_rate": 1.5406408174555978e-05,
"loss": 0.4003,
"step": 407
},
{
"epoch": 0.7756653992395437,
"grad_norm": 0.7326288869442495,
"learning_rate": 1.5378441036003543e-05,
"loss": 0.4565,
"step": 408
},
{
"epoch": 0.7775665399239544,
"grad_norm": 0.689655327927933,
"learning_rate": 1.535041458122865e-05,
"loss": 0.4103,
"step": 409
},
{
"epoch": 0.779467680608365,
"grad_norm": 0.6292132076996315,
"learning_rate": 1.5322329119321508e-05,
"loss": 0.3954,
"step": 410
},
{
"epoch": 0.7813688212927756,
"grad_norm": 0.6906150378080441,
"learning_rate": 1.529418496002308e-05,
"loss": 0.4175,
"step": 411
},
{
"epoch": 0.7832699619771863,
"grad_norm": 0.6435393022142084,
"learning_rate": 1.5265982413721662e-05,
"loss": 0.3906,
"step": 412
},
{
"epoch": 0.785171102661597,
"grad_norm": 0.6389571349977216,
"learning_rate": 1.5237721791449497e-05,
"loss": 0.4226,
"step": 413
},
{
"epoch": 0.7870722433460076,
"grad_norm": 0.697399122162458,
"learning_rate": 1.5209403404879305e-05,
"loss": 0.416,
"step": 414
},
{
"epoch": 0.7889733840304183,
"grad_norm": 0.6376508135645901,
"learning_rate": 1.5181027566320858e-05,
"loss": 0.4141,
"step": 415
},
{
"epoch": 0.7908745247148289,
"grad_norm": 0.699246451563322,
"learning_rate": 1.5152594588717544e-05,
"loss": 0.4306,
"step": 416
},
{
"epoch": 0.7927756653992395,
"grad_norm": 0.6655565980344988,
"learning_rate": 1.5124104785642909e-05,
"loss": 0.4261,
"step": 417
},
{
"epoch": 0.7946768060836502,
"grad_norm": 0.7133513374991727,
"learning_rate": 1.5095558471297196e-05,
"loss": 0.4125,
"step": 418
},
{
"epoch": 0.7965779467680608,
"grad_norm": 0.6664116987009641,
"learning_rate": 1.5066955960503893e-05,
"loss": 0.4177,
"step": 419
},
{
"epoch": 0.7984790874524715,
"grad_norm": 0.6773906655476327,
"learning_rate": 1.5038297568706244e-05,
"loss": 0.4118,
"step": 420
},
{
"epoch": 0.8003802281368821,
"grad_norm": 0.7007230544491551,
"learning_rate": 1.5009583611963772e-05,
"loss": 0.4242,
"step": 421
},
{
"epoch": 0.8022813688212928,
"grad_norm": 0.7388269034786492,
"learning_rate": 1.4980814406948806e-05,
"loss": 0.4238,
"step": 422
},
{
"epoch": 0.8041825095057035,
"grad_norm": 0.6649185528690237,
"learning_rate": 1.4951990270942991e-05,
"loss": 0.4049,
"step": 423
},
{
"epoch": 0.8060836501901141,
"grad_norm": 0.8074913613451679,
"learning_rate": 1.492311152183376e-05,
"loss": 0.391,
"step": 424
},
{
"epoch": 0.8079847908745247,
"grad_norm": 0.7294428485769718,
"learning_rate": 1.4894178478110856e-05,
"loss": 0.4151,
"step": 425
},
{
"epoch": 0.8098859315589354,
"grad_norm": 0.6458244120460152,
"learning_rate": 1.4865191458862816e-05,
"loss": 0.407,
"step": 426
},
{
"epoch": 0.811787072243346,
"grad_norm": 0.8623561735588674,
"learning_rate": 1.4836150783773442e-05,
"loss": 0.4094,
"step": 427
},
{
"epoch": 0.8136882129277566,
"grad_norm": 0.7230087295264667,
"learning_rate": 1.4807056773118276e-05,
"loss": 0.4457,
"step": 428
},
{
"epoch": 0.8155893536121673,
"grad_norm": 0.6312694562062598,
"learning_rate": 1.4777909747761085e-05,
"loss": 0.3813,
"step": 429
},
{
"epoch": 0.8174904942965779,
"grad_norm": 0.827354868176591,
"learning_rate": 1.4748710029150296e-05,
"loss": 0.4332,
"step": 430
},
{
"epoch": 0.8193916349809885,
"grad_norm": 0.6982560284723996,
"learning_rate": 1.4719457939315468e-05,
"loss": 0.4223,
"step": 431
},
{
"epoch": 0.8212927756653993,
"grad_norm": 0.7691903497514757,
"learning_rate": 1.4690153800863743e-05,
"loss": 0.416,
"step": 432
},
{
"epoch": 0.8231939163498099,
"grad_norm": 0.7143769294798197,
"learning_rate": 1.4660797936976278e-05,
"loss": 0.3915,
"step": 433
},
{
"epoch": 0.8250950570342205,
"grad_norm": 0.6614957450846861,
"learning_rate": 1.4631390671404682e-05,
"loss": 0.4061,
"step": 434
},
{
"epoch": 0.8269961977186312,
"grad_norm": 0.7461203353085591,
"learning_rate": 1.460193232846745e-05,
"loss": 0.4001,
"step": 435
},
{
"epoch": 0.8288973384030418,
"grad_norm": 0.8014408308224453,
"learning_rate": 1.4572423233046386e-05,
"loss": 0.4422,
"step": 436
},
{
"epoch": 0.8307984790874525,
"grad_norm": 0.8086959504103169,
"learning_rate": 1.4542863710583022e-05,
"loss": 0.4254,
"step": 437
},
{
"epoch": 0.8326996197718631,
"grad_norm": 0.6578384242170929,
"learning_rate": 1.4513254087075015e-05,
"loss": 0.4102,
"step": 438
},
{
"epoch": 0.8346007604562737,
"grad_norm": 0.6364855422729294,
"learning_rate": 1.4483594689072571e-05,
"loss": 0.4051,
"step": 439
},
{
"epoch": 0.8365019011406845,
"grad_norm": 0.7379756008845559,
"learning_rate": 1.4453885843674837e-05,
"loss": 0.4036,
"step": 440
},
{
"epoch": 0.8384030418250951,
"grad_norm": 0.663558703564245,
"learning_rate": 1.4424127878526278e-05,
"loss": 0.4184,
"step": 441
},
{
"epoch": 0.8403041825095057,
"grad_norm": 0.6574894480005159,
"learning_rate": 1.4394321121813093e-05,
"loss": 0.4032,
"step": 442
},
{
"epoch": 0.8422053231939164,
"grad_norm": 0.7018913238108152,
"learning_rate": 1.436446590225957e-05,
"loss": 0.4193,
"step": 443
},
{
"epoch": 0.844106463878327,
"grad_norm": 0.6037032610954689,
"learning_rate": 1.433456254912447e-05,
"loss": 0.3915,
"step": 444
},
{
"epoch": 0.8460076045627376,
"grad_norm": 0.6503162131831633,
"learning_rate": 1.4304611392197399e-05,
"loss": 0.4232,
"step": 445
},
{
"epoch": 0.8479087452471483,
"grad_norm": 0.7033567411758512,
"learning_rate": 1.427461276179517e-05,
"loss": 0.4188,
"step": 446
},
{
"epoch": 0.8498098859315589,
"grad_norm": 0.6807630709132103,
"learning_rate": 1.4244566988758152e-05,
"loss": 0.4007,
"step": 447
},
{
"epoch": 0.8517110266159695,
"grad_norm": 0.6935982753716256,
"learning_rate": 1.4214474404446633e-05,
"loss": 0.4149,
"step": 448
},
{
"epoch": 0.8536121673003803,
"grad_norm": 0.6544425087100251,
"learning_rate": 1.4184335340737158e-05,
"loss": 0.4276,
"step": 449
},
{
"epoch": 0.8555133079847909,
"grad_norm": 0.6791280931086721,
"learning_rate": 1.4154150130018867e-05,
"loss": 0.4189,
"step": 450
},
{
"epoch": 0.8574144486692015,
"grad_norm": 0.7102400590900081,
"learning_rate": 1.4123919105189836e-05,
"loss": 0.426,
"step": 451
},
{
"epoch": 0.8593155893536122,
"grad_norm": 0.6709020933848153,
"learning_rate": 1.4093642599653406e-05,
"loss": 0.4082,
"step": 452
},
{
"epoch": 0.8612167300380228,
"grad_norm": 0.6994358195708956,
"learning_rate": 1.40633209473145e-05,
"loss": 0.4425,
"step": 453
},
{
"epoch": 0.8631178707224335,
"grad_norm": 0.6456770812646054,
"learning_rate": 1.4032954482575938e-05,
"loss": 0.4095,
"step": 454
},
{
"epoch": 0.8650190114068441,
"grad_norm": 0.6724294781223209,
"learning_rate": 1.4002543540334766e-05,
"loss": 0.4303,
"step": 455
},
{
"epoch": 0.8669201520912547,
"grad_norm": 0.8661394533630303,
"learning_rate": 1.3972088455978537e-05,
"loss": 0.4036,
"step": 456
},
{
"epoch": 0.8688212927756654,
"grad_norm": 0.6727054644704753,
"learning_rate": 1.3941589565381635e-05,
"loss": 0.4172,
"step": 457
},
{
"epoch": 0.870722433460076,
"grad_norm": 0.731977495171835,
"learning_rate": 1.391104720490156e-05,
"loss": 0.4426,
"step": 458
},
{
"epoch": 0.8726235741444867,
"grad_norm": 0.7434833377061584,
"learning_rate": 1.3880461711375224e-05,
"loss": 0.4083,
"step": 459
},
{
"epoch": 0.8745247148288974,
"grad_norm": 0.6846878273527628,
"learning_rate": 1.3849833422115221e-05,
"loss": 0.436,
"step": 460
},
{
"epoch": 0.876425855513308,
"grad_norm": 0.7372196935495674,
"learning_rate": 1.3819162674906134e-05,
"loss": 0.4092,
"step": 461
},
{
"epoch": 0.8783269961977186,
"grad_norm": 0.6515126657380419,
"learning_rate": 1.378844980800078e-05,
"loss": 0.3948,
"step": 462
},
{
"epoch": 0.8802281368821293,
"grad_norm": 0.6720251503751603,
"learning_rate": 1.3757695160116502e-05,
"loss": 0.4185,
"step": 463
},
{
"epoch": 0.8821292775665399,
"grad_norm": 0.7098778860236342,
"learning_rate": 1.3726899070431423e-05,
"loss": 0.3944,
"step": 464
},
{
"epoch": 0.8840304182509505,
"grad_norm": 0.6830207045922908,
"learning_rate": 1.3696061878580707e-05,
"loss": 0.42,
"step": 465
},
{
"epoch": 0.8859315589353612,
"grad_norm": 0.6682618421505093,
"learning_rate": 1.3665183924652817e-05,
"loss": 0.4168,
"step": 466
},
{
"epoch": 0.8878326996197718,
"grad_norm": 0.6570199921065812,
"learning_rate": 1.3634265549185755e-05,
"loss": 0.3928,
"step": 467
},
{
"epoch": 0.8897338403041825,
"grad_norm": 0.660413310181966,
"learning_rate": 1.3603307093163319e-05,
"loss": 0.4325,
"step": 468
},
{
"epoch": 0.8916349809885932,
"grad_norm": 0.6958765821053353,
"learning_rate": 1.3572308898011328e-05,
"loss": 0.402,
"step": 469
},
{
"epoch": 0.8935361216730038,
"grad_norm": 0.7478945491147632,
"learning_rate": 1.3541271305593878e-05,
"loss": 0.4249,
"step": 470
},
{
"epoch": 0.8954372623574145,
"grad_norm": 0.6300601134596673,
"learning_rate": 1.3510194658209547e-05,
"loss": 0.4022,
"step": 471
},
{
"epoch": 0.8973384030418251,
"grad_norm": 0.6916472206313369,
"learning_rate": 1.3479079298587634e-05,
"loss": 0.3976,
"step": 472
},
{
"epoch": 0.8992395437262357,
"grad_norm": 0.6733294889869995,
"learning_rate": 1.3447925569884374e-05,
"loss": 0.4232,
"step": 473
},
{
"epoch": 0.9011406844106464,
"grad_norm": 0.6654146222865712,
"learning_rate": 1.3416733815679166e-05,
"loss": 0.405,
"step": 474
},
{
"epoch": 0.903041825095057,
"grad_norm": 0.6730785707670287,
"learning_rate": 1.3385504379970764e-05,
"loss": 0.4254,
"step": 475
},
{
"epoch": 0.9049429657794676,
"grad_norm": 0.7217349812018427,
"learning_rate": 1.3354237607173494e-05,
"loss": 0.4096,
"step": 476
},
{
"epoch": 0.9068441064638784,
"grad_norm": 0.6725554212400988,
"learning_rate": 1.3322933842113457e-05,
"loss": 0.4342,
"step": 477
},
{
"epoch": 0.908745247148289,
"grad_norm": 0.6196126277626116,
"learning_rate": 1.3291593430024727e-05,
"loss": 0.4083,
"step": 478
},
{
"epoch": 0.9106463878326996,
"grad_norm": 0.764471245012473,
"learning_rate": 1.3260216716545534e-05,
"loss": 0.4116,
"step": 479
},
{
"epoch": 0.9125475285171103,
"grad_norm": 0.6931517372682152,
"learning_rate": 1.3228804047714462e-05,
"loss": 0.4075,
"step": 480
},
{
"epoch": 0.9144486692015209,
"grad_norm": 0.694633561159114,
"learning_rate": 1.319735576996663e-05,
"loss": 0.435,
"step": 481
},
{
"epoch": 0.9163498098859315,
"grad_norm": 0.7414568556033129,
"learning_rate": 1.3165872230129869e-05,
"loss": 0.4153,
"step": 482
},
{
"epoch": 0.9182509505703422,
"grad_norm": 0.6396517675646527,
"learning_rate": 1.3134353775420895e-05,
"loss": 0.393,
"step": 483
},
{
"epoch": 0.9201520912547528,
"grad_norm": 0.6195564445699178,
"learning_rate": 1.3102800753441488e-05,
"loss": 0.4242,
"step": 484
},
{
"epoch": 0.9220532319391636,
"grad_norm": 0.6088136204310354,
"learning_rate": 1.3071213512174655e-05,
"loss": 0.3815,
"step": 485
},
{
"epoch": 0.9239543726235742,
"grad_norm": 0.6281615154225191,
"learning_rate": 1.3039592399980785e-05,
"loss": 0.3928,
"step": 486
},
{
"epoch": 0.9258555133079848,
"grad_norm": 0.6786083409901216,
"learning_rate": 1.3007937765593818e-05,
"loss": 0.4262,
"step": 487
},
{
"epoch": 0.9277566539923955,
"grad_norm": 0.6856445669036492,
"learning_rate": 1.2976249958117395e-05,
"loss": 0.4132,
"step": 488
},
{
"epoch": 0.9296577946768061,
"grad_norm": 0.6875356019462465,
"learning_rate": 1.2944529327021002e-05,
"loss": 0.402,
"step": 489
},
{
"epoch": 0.9315589353612167,
"grad_norm": 0.6559303939961741,
"learning_rate": 1.291277622213612e-05,
"loss": 0.3924,
"step": 490
},
{
"epoch": 0.9334600760456274,
"grad_norm": 0.6768694411848571,
"learning_rate": 1.2880990993652379e-05,
"loss": 0.4098,
"step": 491
},
{
"epoch": 0.935361216730038,
"grad_norm": 0.6199398611570226,
"learning_rate": 1.2849173992113669e-05,
"loss": 0.4051,
"step": 492
},
{
"epoch": 0.9372623574144486,
"grad_norm": 0.650592344480318,
"learning_rate": 1.2817325568414299e-05,
"loss": 0.4208,
"step": 493
},
{
"epoch": 0.9391634980988594,
"grad_norm": 0.7061985707691085,
"learning_rate": 1.2785446073795118e-05,
"loss": 0.4236,
"step": 494
},
{
"epoch": 0.94106463878327,
"grad_norm": 0.7160749602649036,
"learning_rate": 1.2753535859839638e-05,
"loss": 0.4237,
"step": 495
},
{
"epoch": 0.9429657794676806,
"grad_norm": 0.6309276906563795,
"learning_rate": 1.272159527847016e-05,
"loss": 0.3877,
"step": 496
},
{
"epoch": 0.9448669201520913,
"grad_norm": 0.6568010069692419,
"learning_rate": 1.2689624681943897e-05,
"loss": 0.421,
"step": 497
},
{
"epoch": 0.9467680608365019,
"grad_norm": 0.7298441147080376,
"learning_rate": 1.2657624422849077e-05,
"loss": 0.4153,
"step": 498
},
{
"epoch": 0.9486692015209125,
"grad_norm": 0.6703956271447228,
"learning_rate": 1.2625594854101066e-05,
"loss": 0.4112,
"step": 499
},
{
"epoch": 0.9505703422053232,
"grad_norm": 0.6824608127469088,
"learning_rate": 1.2593536328938471e-05,
"loss": 0.4121,
"step": 500
},
{
"epoch": 0.9524714828897338,
"grad_norm": 0.6605822662194483,
"learning_rate": 1.2561449200919253e-05,
"loss": 0.4048,
"step": 501
},
{
"epoch": 0.9543726235741445,
"grad_norm": 0.6457167769235831,
"learning_rate": 1.2529333823916807e-05,
"loss": 0.3979,
"step": 502
},
{
"epoch": 0.9562737642585551,
"grad_norm": 0.6987868620854571,
"learning_rate": 1.2497190552116082e-05,
"loss": 0.4349,
"step": 503
},
{
"epoch": 0.9581749049429658,
"grad_norm": 0.6341693296683917,
"learning_rate": 1.2465019740009662e-05,
"loss": 0.4058,
"step": 504
},
{
"epoch": 0.9600760456273765,
"grad_norm": 0.7345204781693294,
"learning_rate": 1.2432821742393854e-05,
"loss": 0.4213,
"step": 505
},
{
"epoch": 0.9619771863117871,
"grad_norm": 0.6664328110957722,
"learning_rate": 1.2400596914364792e-05,
"loss": 0.4084,
"step": 506
},
{
"epoch": 0.9638783269961977,
"grad_norm": 0.663347728126144,
"learning_rate": 1.2368345611314508e-05,
"loss": 0.4143,
"step": 507
},
{
"epoch": 0.9657794676806084,
"grad_norm": 0.6541702427031206,
"learning_rate": 1.2336068188927002e-05,
"loss": 0.3796,
"step": 508
},
{
"epoch": 0.967680608365019,
"grad_norm": 0.671724946980086,
"learning_rate": 1.2303765003174342e-05,
"loss": 0.4094,
"step": 509
},
{
"epoch": 0.9695817490494296,
"grad_norm": 0.6959893791738174,
"learning_rate": 1.2271436410312727e-05,
"loss": 0.4286,
"step": 510
},
{
"epoch": 0.9714828897338403,
"grad_norm": 0.6868654830710365,
"learning_rate": 1.2239082766878557e-05,
"loss": 0.4271,
"step": 511
},
{
"epoch": 0.973384030418251,
"grad_norm": 0.691224535089129,
"learning_rate": 1.2206704429684504e-05,
"loss": 0.3986,
"step": 512
},
{
"epoch": 0.9752851711026616,
"grad_norm": 0.6808205167962615,
"learning_rate": 1.2174301755815572e-05,
"loss": 0.4224,
"step": 513
},
{
"epoch": 0.9771863117870723,
"grad_norm": 0.664435026571481,
"learning_rate": 1.2141875102625166e-05,
"loss": 0.4105,
"step": 514
},
{
"epoch": 0.9790874524714829,
"grad_norm": 0.6571567246246903,
"learning_rate": 1.2109424827731144e-05,
"loss": 0.4188,
"step": 515
},
{
"epoch": 0.9809885931558935,
"grad_norm": 0.6852597719623185,
"learning_rate": 1.2076951289011884e-05,
"loss": 0.4331,
"step": 516
},
{
"epoch": 0.9828897338403042,
"grad_norm": 0.6738924232853097,
"learning_rate": 1.204445484460232e-05,
"loss": 0.4196,
"step": 517
},
{
"epoch": 0.9847908745247148,
"grad_norm": 0.6598475731826865,
"learning_rate": 1.2011935852890004e-05,
"loss": 0.4218,
"step": 518
},
{
"epoch": 0.9866920152091255,
"grad_norm": 0.6299028373310901,
"learning_rate": 1.1979394672511156e-05,
"loss": 0.3776,
"step": 519
},
{
"epoch": 0.9885931558935361,
"grad_norm": 0.6793513247395356,
"learning_rate": 1.19468316623467e-05,
"loss": 0.4117,
"step": 520
},
{
"epoch": 0.9904942965779467,
"grad_norm": 0.685276652860963,
"learning_rate": 1.1914247181518312e-05,
"loss": 0.3962,
"step": 521
},
{
"epoch": 0.9923954372623575,
"grad_norm": 0.6736429365507653,
"learning_rate": 1.1881641589384456e-05,
"loss": 0.4083,
"step": 522
},
{
"epoch": 0.9942965779467681,
"grad_norm": 0.719492486658024,
"learning_rate": 1.1849015245536424e-05,
"loss": 0.4082,
"step": 523
},
{
"epoch": 0.9961977186311787,
"grad_norm": 0.6799878382788851,
"learning_rate": 1.1816368509794365e-05,
"loss": 0.4051,
"step": 524
},
{
"epoch": 0.9980988593155894,
"grad_norm": 0.6288963439397117,
"learning_rate": 1.1783701742203326e-05,
"loss": 0.3912,
"step": 525
},
{
"epoch": 1.0,
"grad_norm": 0.6598685229794085,
"learning_rate": 1.1751015303029272e-05,
"loss": 0.3989,
"step": 526
},
{
"epoch": 1.0019011406844107,
"grad_norm": 0.7707661421706666,
"learning_rate": 1.1718309552755118e-05,
"loss": 0.3238,
"step": 527
},
{
"epoch": 1.0038022813688212,
"grad_norm": 0.7625734233051265,
"learning_rate": 1.1685584852076746e-05,
"loss": 0.3297,
"step": 528
},
{
"epoch": 1.005703422053232,
"grad_norm": 0.7144406560759979,
"learning_rate": 1.1652841561899042e-05,
"loss": 0.3151,
"step": 529
},
{
"epoch": 1.0076045627376427,
"grad_norm": 0.7226873165784877,
"learning_rate": 1.1620080043331901e-05,
"loss": 0.314,
"step": 530
},
{
"epoch": 1.0095057034220531,
"grad_norm": 0.745014707808309,
"learning_rate": 1.1587300657686254e-05,
"loss": 0.315,
"step": 531
},
{
"epoch": 1.0114068441064639,
"grad_norm": 0.8189545658094899,
"learning_rate": 1.1554503766470069e-05,
"loss": 0.3169,
"step": 532
},
{
"epoch": 1.0133079847908746,
"grad_norm": 0.895506745114853,
"learning_rate": 1.1521689731384391e-05,
"loss": 0.3188,
"step": 533
},
{
"epoch": 1.015209125475285,
"grad_norm": 0.7960997859721232,
"learning_rate": 1.1488858914319321e-05,
"loss": 0.321,
"step": 534
},
{
"epoch": 1.0171102661596958,
"grad_norm": 0.7289684808142549,
"learning_rate": 1.1456011677350052e-05,
"loss": 0.3047,
"step": 535
},
{
"epoch": 1.0190114068441065,
"grad_norm": 0.7615362756958234,
"learning_rate": 1.1423148382732854e-05,
"loss": 0.3145,
"step": 536
},
{
"epoch": 1.020912547528517,
"grad_norm": 0.7692117434319292,
"learning_rate": 1.1390269392901096e-05,
"loss": 0.2941,
"step": 537
},
{
"epoch": 1.0228136882129277,
"grad_norm": 0.7205353979195608,
"learning_rate": 1.1357375070461241e-05,
"loss": 0.3037,
"step": 538
},
{
"epoch": 1.0247148288973384,
"grad_norm": 0.6895861024882127,
"learning_rate": 1.1324465778188846e-05,
"loss": 0.3112,
"step": 539
},
{
"epoch": 1.026615969581749,
"grad_norm": 0.682731445634838,
"learning_rate": 1.1291541879024568e-05,
"loss": 0.3046,
"step": 540
},
{
"epoch": 1.0285171102661597,
"grad_norm": 0.6694869444974515,
"learning_rate": 1.1258603736070145e-05,
"loss": 0.2912,
"step": 541
},
{
"epoch": 1.0304182509505704,
"grad_norm": 0.7020705287186066,
"learning_rate": 1.1225651712584413e-05,
"loss": 0.3043,
"step": 542
},
{
"epoch": 1.032319391634981,
"grad_norm": 0.7187158222861396,
"learning_rate": 1.1192686171979288e-05,
"loss": 0.3089,
"step": 543
},
{
"epoch": 1.0342205323193916,
"grad_norm": 0.6910452053402893,
"learning_rate": 1.1159707477815756e-05,
"loss": 0.2965,
"step": 544
},
{
"epoch": 1.0361216730038023,
"grad_norm": 0.7019848884637951,
"learning_rate": 1.1126715993799875e-05,
"loss": 0.3021,
"step": 545
},
{
"epoch": 1.038022813688213,
"grad_norm": 0.7822185663006678,
"learning_rate": 1.1093712083778748e-05,
"loss": 0.3019,
"step": 546
},
{
"epoch": 1.0399239543726235,
"grad_norm": 0.7342403295913638,
"learning_rate": 1.1060696111736515e-05,
"loss": 0.3087,
"step": 547
},
{
"epoch": 1.0418250950570342,
"grad_norm": 0.7320472454146006,
"learning_rate": 1.1027668441790358e-05,
"loss": 0.2954,
"step": 548
},
{
"epoch": 1.043726235741445,
"grad_norm": 0.8858108421176452,
"learning_rate": 1.099462943818646e-05,
"loss": 0.3101,
"step": 549
},
{
"epoch": 1.0456273764258555,
"grad_norm": 0.7084236790614143,
"learning_rate": 1.0961579465295987e-05,
"loss": 0.3,
"step": 550
},
{
"epoch": 1.0475285171102662,
"grad_norm": 0.7770517048890775,
"learning_rate": 1.0928518887611099e-05,
"loss": 0.3084,
"step": 551
},
{
"epoch": 1.049429657794677,
"grad_norm": 0.8411605807242971,
"learning_rate": 1.0895448069740902e-05,
"loss": 0.3159,
"step": 552
},
{
"epoch": 1.0513307984790874,
"grad_norm": 0.6842586485921024,
"learning_rate": 1.0862367376407433e-05,
"loss": 0.3045,
"step": 553
},
{
"epoch": 1.053231939163498,
"grad_norm": 0.8081603324157975,
"learning_rate": 1.0829277172441648e-05,
"loss": 0.3204,
"step": 554
},
{
"epoch": 1.0551330798479088,
"grad_norm": 0.7366783869063036,
"learning_rate": 1.0796177822779384e-05,
"loss": 0.2901,
"step": 555
},
{
"epoch": 1.0570342205323193,
"grad_norm": 0.7454930410611925,
"learning_rate": 1.0763069692457346e-05,
"loss": 0.3118,
"step": 556
},
{
"epoch": 1.05893536121673,
"grad_norm": 0.7323735375889059,
"learning_rate": 1.0729953146609076e-05,
"loss": 0.3081,
"step": 557
},
{
"epoch": 1.0608365019011408,
"grad_norm": 0.7447828933834634,
"learning_rate": 1.0696828550460928e-05,
"loss": 0.3085,
"step": 558
},
{
"epoch": 1.0627376425855513,
"grad_norm": 0.8233473854490362,
"learning_rate": 1.0663696269328034e-05,
"loss": 0.3139,
"step": 559
},
{
"epoch": 1.064638783269962,
"grad_norm": 0.7518365236137025,
"learning_rate": 1.0630556668610286e-05,
"loss": 0.29,
"step": 560
},
{
"epoch": 1.0665399239543727,
"grad_norm": 0.7272544608215237,
"learning_rate": 1.059741011378829e-05,
"loss": 0.3016,
"step": 561
},
{
"epoch": 1.0684410646387832,
"grad_norm": 0.7107163057659822,
"learning_rate": 1.0564256970419367e-05,
"loss": 0.3044,
"step": 562
},
{
"epoch": 1.070342205323194,
"grad_norm": 0.7801714229545913,
"learning_rate": 1.0531097604133473e-05,
"loss": 0.3161,
"step": 563
},
{
"epoch": 1.0722433460076046,
"grad_norm": 0.724057724193455,
"learning_rate": 1.0497932380629207e-05,
"loss": 0.2918,
"step": 564
},
{
"epoch": 1.0741444866920151,
"grad_norm": 0.68457508663241,
"learning_rate": 1.0464761665669771e-05,
"loss": 0.3014,
"step": 565
},
{
"epoch": 1.0760456273764258,
"grad_norm": 0.7841620061659592,
"learning_rate": 1.0431585825078916e-05,
"loss": 0.3086,
"step": 566
},
{
"epoch": 1.0779467680608366,
"grad_norm": 0.7110327786817953,
"learning_rate": 1.0398405224736927e-05,
"loss": 0.2982,
"step": 567
},
{
"epoch": 1.079847908745247,
"grad_norm": 0.6942966087067389,
"learning_rate": 1.0365220230576592e-05,
"loss": 0.2908,
"step": 568
},
{
"epoch": 1.0817490494296578,
"grad_norm": 0.7348455230582168,
"learning_rate": 1.0332031208579133e-05,
"loss": 0.3104,
"step": 569
},
{
"epoch": 1.0836501901140685,
"grad_norm": 0.7068075202197642,
"learning_rate": 1.0298838524770212e-05,
"loss": 0.2996,
"step": 570
},
{
"epoch": 1.085551330798479,
"grad_norm": 0.7166667032652435,
"learning_rate": 1.0265642545215872e-05,
"loss": 0.3048,
"step": 571
},
{
"epoch": 1.0874524714828897,
"grad_norm": 0.7057902963440738,
"learning_rate": 1.0232443636018502e-05,
"loss": 0.3228,
"step": 572
},
{
"epoch": 1.0893536121673004,
"grad_norm": 0.6656181229687357,
"learning_rate": 1.0199242163312794e-05,
"loss": 0.2902,
"step": 573
},
{
"epoch": 1.091254752851711,
"grad_norm": 0.6691369769404082,
"learning_rate": 1.0166038493261723e-05,
"loss": 0.2961,
"step": 574
},
{
"epoch": 1.0931558935361216,
"grad_norm": 0.7134851004552554,
"learning_rate": 1.013283299205249e-05,
"loss": 0.3022,
"step": 575
},
{
"epoch": 1.0950570342205324,
"grad_norm": 0.7004290691140342,
"learning_rate": 1.0099626025892491e-05,
"loss": 0.2954,
"step": 576
},
{
"epoch": 1.0969581749049429,
"grad_norm": 0.6922229062216017,
"learning_rate": 1.0066417961005283e-05,
"loss": 0.3064,
"step": 577
},
{
"epoch": 1.0988593155893536,
"grad_norm": 0.6964372332405654,
"learning_rate": 1.0033209163626539e-05,
"loss": 0.2974,
"step": 578
},
{
"epoch": 1.1007604562737643,
"grad_norm": 0.6997814084627099,
"learning_rate": 1e-05,
"loss": 0.2945,
"step": 579
},
{
"epoch": 1.102661596958175,
"grad_norm": 0.7136980138958149,
"learning_rate": 9.966790836373465e-06,
"loss": 0.3019,
"step": 580
},
{
"epoch": 1.1045627376425855,
"grad_norm": 0.7382312402585471,
"learning_rate": 9.933582038994719e-06,
"loss": 0.3236,
"step": 581
},
{
"epoch": 1.1064638783269962,
"grad_norm": 0.6836779899335518,
"learning_rate": 9.90037397410751e-06,
"loss": 0.3061,
"step": 582
},
{
"epoch": 1.108365019011407,
"grad_norm": 0.6832044341621887,
"learning_rate": 9.867167007947511e-06,
"loss": 0.2978,
"step": 583
},
{
"epoch": 1.1102661596958174,
"grad_norm": 0.7360146490302776,
"learning_rate": 9.833961506738282e-06,
"loss": 0.3088,
"step": 584
},
{
"epoch": 1.1121673003802282,
"grad_norm": 0.6804323906090229,
"learning_rate": 9.80075783668721e-06,
"loss": 0.3055,
"step": 585
},
{
"epoch": 1.1140684410646389,
"grad_norm": 0.7537424441246182,
"learning_rate": 9.767556363981503e-06,
"loss": 0.3019,
"step": 586
},
{
"epoch": 1.1159695817490494,
"grad_norm": 0.6667484356312856,
"learning_rate": 9.734357454784131e-06,
"loss": 0.291,
"step": 587
},
{
"epoch": 1.11787072243346,
"grad_norm": 0.6519171172529563,
"learning_rate": 9.701161475229791e-06,
"loss": 0.295,
"step": 588
},
{
"epoch": 1.1197718631178708,
"grad_norm": 0.7101749338230608,
"learning_rate": 9.66796879142087e-06,
"loss": 0.2937,
"step": 589
},
{
"epoch": 1.1216730038022813,
"grad_norm": 0.7359383681370455,
"learning_rate": 9.634779769423412e-06,
"loss": 0.2908,
"step": 590
},
{
"epoch": 1.123574144486692,
"grad_norm": 0.6832170325532164,
"learning_rate": 9.601594775263073e-06,
"loss": 0.2968,
"step": 591
},
{
"epoch": 1.1254752851711027,
"grad_norm": 0.7736773353770998,
"learning_rate": 9.568414174921085e-06,
"loss": 0.3116,
"step": 592
},
{
"epoch": 1.1273764258555132,
"grad_norm": 0.776457673364819,
"learning_rate": 9.535238334330234e-06,
"loss": 0.311,
"step": 593
},
{
"epoch": 1.129277566539924,
"grad_norm": 0.7459635800134198,
"learning_rate": 9.502067619370794e-06,
"loss": 0.3114,
"step": 594
},
{
"epoch": 1.1311787072243347,
"grad_norm": 0.7189799648817666,
"learning_rate": 9.468902395866532e-06,
"loss": 0.3038,
"step": 595
},
{
"epoch": 1.1330798479087452,
"grad_norm": 0.6815792890637997,
"learning_rate": 9.435743029580638e-06,
"loss": 0.306,
"step": 596
},
{
"epoch": 1.1349809885931559,
"grad_norm": 0.6814841733249866,
"learning_rate": 9.402589886211711e-06,
"loss": 0.2945,
"step": 597
},
{
"epoch": 1.1368821292775666,
"grad_norm": 0.7953980926394838,
"learning_rate": 9.369443331389718e-06,
"loss": 0.312,
"step": 598
},
{
"epoch": 1.138783269961977,
"grad_norm": 0.7775369621748678,
"learning_rate": 9.336303730671968e-06,
"loss": 0.3068,
"step": 599
},
{
"epoch": 1.1406844106463878,
"grad_norm": 0.7389217637657598,
"learning_rate": 9.303171449539074e-06,
"loss": 0.3112,
"step": 600
},
{
"epoch": 1.1425855513307985,
"grad_norm": 0.6790158735462165,
"learning_rate": 9.270046853390924e-06,
"loss": 0.2937,
"step": 601
},
{
"epoch": 1.144486692015209,
"grad_norm": 0.7186410127547359,
"learning_rate": 9.236930307542654e-06,
"loss": 0.3062,
"step": 602
},
{
"epoch": 1.1463878326996197,
"grad_norm": 0.7463210684268154,
"learning_rate": 9.203822177220621e-06,
"loss": 0.2852,
"step": 603
},
{
"epoch": 1.1482889733840305,
"grad_norm": 0.7607233656039101,
"learning_rate": 9.170722827558357e-06,
"loss": 0.3204,
"step": 604
},
{
"epoch": 1.1501901140684412,
"grad_norm": 0.6871760005654157,
"learning_rate": 9.13763262359257e-06,
"loss": 0.309,
"step": 605
},
{
"epoch": 1.1520912547528517,
"grad_norm": 0.7026353176415957,
"learning_rate": 9.104551930259101e-06,
"loss": 0.3084,
"step": 606
},
{
"epoch": 1.1539923954372624,
"grad_norm": 0.7944800232126742,
"learning_rate": 9.071481112388905e-06,
"loss": 0.3139,
"step": 607
},
{
"epoch": 1.1558935361216731,
"grad_norm": 0.640219772261035,
"learning_rate": 9.038420534704015e-06,
"loss": 0.2878,
"step": 608
},
{
"epoch": 1.1577946768060836,
"grad_norm": 0.7339760698042704,
"learning_rate": 9.005370561813545e-06,
"loss": 0.3128,
"step": 609
},
{
"epoch": 1.1596958174904943,
"grad_norm": 0.732085405182035,
"learning_rate": 8.972331558209644e-06,
"loss": 0.3153,
"step": 610
},
{
"epoch": 1.161596958174905,
"grad_norm": 0.6712008455975784,
"learning_rate": 8.939303888263485e-06,
"loss": 0.3015,
"step": 611
},
{
"epoch": 1.1634980988593155,
"grad_norm": 0.7029762742600044,
"learning_rate": 8.906287916221259e-06,
"loss": 0.3015,
"step": 612
},
{
"epoch": 1.1653992395437263,
"grad_norm": 0.7528972523494144,
"learning_rate": 8.873284006200129e-06,
"loss": 0.2949,
"step": 613
},
{
"epoch": 1.167300380228137,
"grad_norm": 0.7133749419747232,
"learning_rate": 8.840292522184247e-06,
"loss": 0.2905,
"step": 614
},
{
"epoch": 1.1692015209125475,
"grad_norm": 0.6843996752342721,
"learning_rate": 8.807313828020715e-06,
"loss": 0.3158,
"step": 615
},
{
"epoch": 1.1711026615969582,
"grad_norm": 0.8046093260719543,
"learning_rate": 8.774348287415589e-06,
"loss": 0.3063,
"step": 616
},
{
"epoch": 1.173003802281369,
"grad_norm": 0.707990066288727,
"learning_rate": 8.74139626392986e-06,
"loss": 0.3085,
"step": 617
},
{
"epoch": 1.1749049429657794,
"grad_norm": 0.7187875805696615,
"learning_rate": 8.708458120975436e-06,
"loss": 0.2912,
"step": 618
},
{
"epoch": 1.1768060836501901,
"grad_norm": 0.764868008761198,
"learning_rate": 8.675534221811156e-06,
"loss": 0.2936,
"step": 619
},
{
"epoch": 1.1787072243346008,
"grad_norm": 0.6887666196981906,
"learning_rate": 8.64262492953876e-06,
"loss": 0.2894,
"step": 620
},
{
"epoch": 1.1806083650190113,
"grad_norm": 0.7507655986042095,
"learning_rate": 8.60973060709891e-06,
"loss": 0.3233,
"step": 621
},
{
"epoch": 1.182509505703422,
"grad_norm": 0.6901555543261185,
"learning_rate": 8.576851617267151e-06,
"loss": 0.3106,
"step": 622
},
{
"epoch": 1.1844106463878328,
"grad_norm": 0.7645087443425159,
"learning_rate": 8.543988322649954e-06,
"loss": 0.317,
"step": 623
},
{
"epoch": 1.1863117870722433,
"grad_norm": 0.7218125636962177,
"learning_rate": 8.511141085680684e-06,
"loss": 0.3164,
"step": 624
},
{
"epoch": 1.188212927756654,
"grad_norm": 0.6953092694730455,
"learning_rate": 8.478310268615612e-06,
"loss": 0.3075,
"step": 625
},
{
"epoch": 1.1901140684410647,
"grad_norm": 0.7302946165004834,
"learning_rate": 8.445496233529934e-06,
"loss": 0.3032,
"step": 626
},
{
"epoch": 1.1920152091254752,
"grad_norm": 0.7066526146987111,
"learning_rate": 8.41269934231375e-06,
"loss": 0.3076,
"step": 627
},
{
"epoch": 1.193916349809886,
"grad_norm": 0.7336702739452111,
"learning_rate": 8.3799199566681e-06,
"loss": 0.2917,
"step": 628
},
{
"epoch": 1.1958174904942966,
"grad_norm": 0.663472351993531,
"learning_rate": 8.34715843810096e-06,
"loss": 0.2873,
"step": 629
},
{
"epoch": 1.1977186311787071,
"grad_norm": 0.6950687330794441,
"learning_rate": 8.314415147923254e-06,
"loss": 0.2993,
"step": 630
},
{
"epoch": 1.1996197718631179,
"grad_norm": 0.6546553624464801,
"learning_rate": 8.281690447244887e-06,
"loss": 0.2966,
"step": 631
},
{
"epoch": 1.2015209125475286,
"grad_norm": 0.7933948183750206,
"learning_rate": 8.248984696970732e-06,
"loss": 0.2962,
"step": 632
},
{
"epoch": 1.203422053231939,
"grad_norm": 0.7076285465002569,
"learning_rate": 8.216298257796677e-06,
"loss": 0.308,
"step": 633
},
{
"epoch": 1.2053231939163498,
"grad_norm": 0.6936251949675809,
"learning_rate": 8.183631490205636e-06,
"loss": 0.2963,
"step": 634
},
{
"epoch": 1.2072243346007605,
"grad_norm": 0.7438682404697773,
"learning_rate": 8.150984754463578e-06,
"loss": 0.3037,
"step": 635
},
{
"epoch": 1.209125475285171,
"grad_norm": 0.6992004489568848,
"learning_rate": 8.118358410615545e-06,
"loss": 0.2838,
"step": 636
},
{
"epoch": 1.2110266159695817,
"grad_norm": 0.645561025404599,
"learning_rate": 8.08575281848169e-06,
"loss": 0.2955,
"step": 637
},
{
"epoch": 1.2129277566539924,
"grad_norm": 0.6961726891740515,
"learning_rate": 8.0531683376533e-06,
"loss": 0.2926,
"step": 638
},
{
"epoch": 1.214828897338403,
"grad_norm": 0.6881623547090101,
"learning_rate": 8.020605327488846e-06,
"loss": 0.3147,
"step": 639
},
{
"epoch": 1.2167300380228137,
"grad_norm": 0.7890202776812927,
"learning_rate": 7.988064147110001e-06,
"loss": 0.3106,
"step": 640
},
{
"epoch": 1.2186311787072244,
"grad_norm": 0.7049092888213322,
"learning_rate": 7.955545155397684e-06,
"loss": 0.2987,
"step": 641
},
{
"epoch": 1.2205323193916349,
"grad_norm": 0.730223157224052,
"learning_rate": 7.923048710988119e-06,
"loss": 0.2948,
"step": 642
},
{
"epoch": 1.2224334600760456,
"grad_norm": 0.7652513896182136,
"learning_rate": 7.890575172268858e-06,
"loss": 0.3053,
"step": 643
},
{
"epoch": 1.2243346007604563,
"grad_norm": 0.674701580422622,
"learning_rate": 7.858124897374837e-06,
"loss": 0.2925,
"step": 644
},
{
"epoch": 1.2262357414448668,
"grad_norm": 0.6807132266753227,
"learning_rate": 7.825698244184432e-06,
"loss": 0.2845,
"step": 645
},
{
"epoch": 1.2281368821292775,
"grad_norm": 0.7901693441867107,
"learning_rate": 7.7932955703155e-06,
"loss": 0.2905,
"step": 646
},
{
"epoch": 1.2300380228136882,
"grad_norm": 0.7600608732789259,
"learning_rate": 7.760917233121443e-06,
"loss": 0.3121,
"step": 647
},
{
"epoch": 1.231939163498099,
"grad_norm": 0.7078947493930366,
"learning_rate": 7.728563589687275e-06,
"loss": 0.2893,
"step": 648
},
{
"epoch": 1.2338403041825095,
"grad_norm": 0.7817788991201847,
"learning_rate": 7.696234996825663e-06,
"loss": 0.3087,
"step": 649
},
{
"epoch": 1.2357414448669202,
"grad_norm": 0.6909924038794721,
"learning_rate": 7.663931811073003e-06,
"loss": 0.3034,
"step": 650
},
{
"epoch": 1.2376425855513309,
"grad_norm": 0.7303015455882389,
"learning_rate": 7.631654388685496e-06,
"loss": 0.3063,
"step": 651
},
{
"epoch": 1.2395437262357414,
"grad_norm": 0.7540824153765674,
"learning_rate": 7.599403085635208e-06,
"loss": 0.3114,
"step": 652
},
{
"epoch": 1.241444866920152,
"grad_norm": 0.6881087413287992,
"learning_rate": 7.567178257606147e-06,
"loss": 0.313,
"step": 653
},
{
"epoch": 1.2433460076045628,
"grad_norm": 0.713688461024637,
"learning_rate": 7.534980259990341e-06,
"loss": 0.2927,
"step": 654
},
{
"epoch": 1.2452471482889733,
"grad_norm": 0.7026008662997305,
"learning_rate": 7.50280944788392e-06,
"loss": 0.2939,
"step": 655
},
{
"epoch": 1.247148288973384,
"grad_norm": 0.7097056742294954,
"learning_rate": 7.470666176083193e-06,
"loss": 0.3189,
"step": 656
},
{
"epoch": 1.2490494296577948,
"grad_norm": 0.8257708963071161,
"learning_rate": 7.438550799080746e-06,
"loss": 0.3017,
"step": 657
},
{
"epoch": 1.2509505703422052,
"grad_norm": 0.7960706251184752,
"learning_rate": 7.40646367106153e-06,
"loss": 0.3041,
"step": 658
},
{
"epoch": 1.252851711026616,
"grad_norm": 0.6696622840240353,
"learning_rate": 7.3744051458989395e-06,
"loss": 0.2935,
"step": 659
},
{
"epoch": 1.2547528517110267,
"grad_norm": 0.7488775897314754,
"learning_rate": 7.342375577150928e-06,
"loss": 0.3028,
"step": 660
},
{
"epoch": 1.2566539923954372,
"grad_norm": 0.7928761227943126,
"learning_rate": 7.310375318056107e-06,
"loss": 0.3062,
"step": 661
},
{
"epoch": 1.258555133079848,
"grad_norm": 0.7012358452683226,
"learning_rate": 7.278404721529843e-06,
"loss": 0.3137,
"step": 662
},
{
"epoch": 1.2604562737642586,
"grad_norm": 0.6803211324037587,
"learning_rate": 7.246464140160365e-06,
"loss": 0.2928,
"step": 663
},
{
"epoch": 1.2623574144486693,
"grad_norm": 0.7293745156754472,
"learning_rate": 7.214553926204884e-06,
"loss": 0.2904,
"step": 664
},
{
"epoch": 1.2642585551330798,
"grad_norm": 0.7070119324937809,
"learning_rate": 7.182674431585703e-06,
"loss": 0.301,
"step": 665
},
{
"epoch": 1.2661596958174905,
"grad_norm": 0.7587610692727196,
"learning_rate": 7.150826007886334e-06,
"loss": 0.3034,
"step": 666
},
{
"epoch": 1.2680608365019013,
"grad_norm": 0.6825001274711814,
"learning_rate": 7.119009006347625e-06,
"loss": 0.2871,
"step": 667
},
{
"epoch": 1.2699619771863118,
"grad_norm": 0.6794705372175478,
"learning_rate": 7.087223777863883e-06,
"loss": 0.2981,
"step": 668
},
{
"epoch": 1.2718631178707225,
"grad_norm": 0.714317945001485,
"learning_rate": 7.055470672979003e-06,
"loss": 0.3115,
"step": 669
},
{
"epoch": 1.2737642585551332,
"grad_norm": 0.683338799164079,
"learning_rate": 7.023750041882609e-06,
"loss": 0.3046,
"step": 670
},
{
"epoch": 1.2756653992395437,
"grad_norm": 0.6474761154398133,
"learning_rate": 6.992062234406185e-06,
"loss": 0.301,
"step": 671
},
{
"epoch": 1.2775665399239544,
"grad_norm": 0.6640003728370285,
"learning_rate": 6.960407600019217e-06,
"loss": 0.2886,
"step": 672
},
{
"epoch": 1.2794676806083651,
"grad_norm": 0.6983521247856774,
"learning_rate": 6.9287864878253475e-06,
"loss": 0.2922,
"step": 673
},
{
"epoch": 1.2813688212927756,
"grad_norm": 0.7364727569993808,
"learning_rate": 6.897199246558515e-06,
"loss": 0.3039,
"step": 674
},
{
"epoch": 1.2832699619771863,
"grad_norm": 0.6957115741395067,
"learning_rate": 6.865646224579108e-06,
"loss": 0.2954,
"step": 675
},
{
"epoch": 1.285171102661597,
"grad_norm": 0.7141133530708946,
"learning_rate": 6.834127769870134e-06,
"loss": 0.2884,
"step": 676
},
{
"epoch": 1.2870722433460076,
"grad_norm": 0.8554444347781054,
"learning_rate": 6.802644230033373e-06,
"loss": 0.3095,
"step": 677
},
{
"epoch": 1.2889733840304183,
"grad_norm": 0.7225580348498389,
"learning_rate": 6.771195952285541e-06,
"loss": 0.3004,
"step": 678
},
{
"epoch": 1.290874524714829,
"grad_norm": 0.7220499278808754,
"learning_rate": 6.739783283454469e-06,
"loss": 0.2935,
"step": 679
},
{
"epoch": 1.2927756653992395,
"grad_norm": 0.7529434476476357,
"learning_rate": 6.708406569975274e-06,
"loss": 0.2808,
"step": 680
},
{
"epoch": 1.2946768060836502,
"grad_norm": 0.7695545463943655,
"learning_rate": 6.6770661578865444e-06,
"loss": 0.2952,
"step": 681
},
{
"epoch": 1.296577946768061,
"grad_norm": 0.7041223867596914,
"learning_rate": 6.645762392826509e-06,
"loss": 0.3048,
"step": 682
},
{
"epoch": 1.2984790874524714,
"grad_norm": 0.7608267360696721,
"learning_rate": 6.614495620029238e-06,
"loss": 0.3037,
"step": 683
},
{
"epoch": 1.3003802281368821,
"grad_norm": 0.8214520605342743,
"learning_rate": 6.583266184320836e-06,
"loss": 0.3004,
"step": 684
},
{
"epoch": 1.3022813688212929,
"grad_norm": 0.8184658810499741,
"learning_rate": 6.552074430115624e-06,
"loss": 0.3086,
"step": 685
},
{
"epoch": 1.3041825095057034,
"grad_norm": 0.714596287281576,
"learning_rate": 6.520920701412371e-06,
"loss": 0.2866,
"step": 686
},
{
"epoch": 1.306083650190114,
"grad_norm": 0.7513517328244365,
"learning_rate": 6.489805341790456e-06,
"loss": 0.2913,
"step": 687
},
{
"epoch": 1.3079847908745248,
"grad_norm": 0.7813765153978279,
"learning_rate": 6.458728694406124e-06,
"loss": 0.3082,
"step": 688
},
{
"epoch": 1.3098859315589353,
"grad_norm": 0.6866453555163008,
"learning_rate": 6.427691101988673e-06,
"loss": 0.2954,
"step": 689
},
{
"epoch": 1.311787072243346,
"grad_norm": 0.7107430474393321,
"learning_rate": 6.396692906836686e-06,
"loss": 0.3024,
"step": 690
},
{
"epoch": 1.3136882129277567,
"grad_norm": 0.7107547238077487,
"learning_rate": 6.3657344508142495e-06,
"loss": 0.2931,
"step": 691
},
{
"epoch": 1.3155893536121672,
"grad_norm": 0.7497143028634738,
"learning_rate": 6.334816075347185e-06,
"loss": 0.2852,
"step": 692
},
{
"epoch": 1.317490494296578,
"grad_norm": 0.7083716112611571,
"learning_rate": 6.303938121419295e-06,
"loss": 0.2915,
"step": 693
},
{
"epoch": 1.3193916349809887,
"grad_norm": 0.69245156468591,
"learning_rate": 6.273100929568579e-06,
"loss": 0.3024,
"step": 694
},
{
"epoch": 1.3212927756653992,
"grad_norm": 0.7111730598143924,
"learning_rate": 6.242304839883502e-06,
"loss": 0.3057,
"step": 695
},
{
"epoch": 1.3231939163498099,
"grad_norm": 0.7126414268396348,
"learning_rate": 6.211550191999223e-06,
"loss": 0.2983,
"step": 696
},
{
"epoch": 1.3250950570342206,
"grad_norm": 0.7614361696731025,
"learning_rate": 6.18083732509387e-06,
"loss": 0.3228,
"step": 697
},
{
"epoch": 1.326996197718631,
"grad_norm": 0.7692049074523043,
"learning_rate": 6.150166577884781e-06,
"loss": 0.2935,
"step": 698
},
{
"epoch": 1.3288973384030418,
"grad_norm": 0.7169581247800685,
"learning_rate": 6.119538288624778e-06,
"loss": 0.2859,
"step": 699
},
{
"epoch": 1.3307984790874525,
"grad_norm": 0.7800374943684687,
"learning_rate": 6.088952795098442e-06,
"loss": 0.2914,
"step": 700
},
{
"epoch": 1.332699619771863,
"grad_norm": 0.7566309644291601,
"learning_rate": 6.058410434618367e-06,
"loss": 0.303,
"step": 701
},
{
"epoch": 1.3346007604562737,
"grad_norm": 0.6812583754292457,
"learning_rate": 6.027911544021465e-06,
"loss": 0.3023,
"step": 702
},
{
"epoch": 1.3365019011406845,
"grad_norm": 0.7056981598036505,
"learning_rate": 5.997456459665237e-06,
"loss": 0.2943,
"step": 703
},
{
"epoch": 1.338403041825095,
"grad_norm": 0.732563106389636,
"learning_rate": 5.967045517424062e-06,
"loss": 0.2988,
"step": 704
},
{
"epoch": 1.3403041825095057,
"grad_norm": 0.7359652762348089,
"learning_rate": 5.936679052685505e-06,
"loss": 0.3021,
"step": 705
},
{
"epoch": 1.3422053231939164,
"grad_norm": 0.7138876103546067,
"learning_rate": 5.906357400346596e-06,
"loss": 0.2968,
"step": 706
},
{
"epoch": 1.3441064638783269,
"grad_norm": 0.6737030120189719,
"learning_rate": 5.876080894810167e-06,
"loss": 0.2895,
"step": 707
},
{
"epoch": 1.3460076045627376,
"grad_norm": 0.7113630573144554,
"learning_rate": 5.845849869981137e-06,
"loss": 0.2946,
"step": 708
},
{
"epoch": 1.3479087452471483,
"grad_norm": 0.7257558523861187,
"learning_rate": 5.815664659262845e-06,
"loss": 0.2974,
"step": 709
},
{
"epoch": 1.3498098859315588,
"grad_norm": 0.7062159528887825,
"learning_rate": 5.78552559555337e-06,
"loss": 0.2895,
"step": 710
},
{
"epoch": 1.3517110266159695,
"grad_norm": 0.67014728307949,
"learning_rate": 5.755433011241851e-06,
"loss": 0.2898,
"step": 711
},
{
"epoch": 1.3536121673003803,
"grad_norm": 0.7098566875539977,
"learning_rate": 5.725387238204831e-06,
"loss": 0.3038,
"step": 712
},
{
"epoch": 1.3555133079847907,
"grad_norm": 0.7190302489020721,
"learning_rate": 5.695388607802603e-06,
"loss": 0.2996,
"step": 713
},
{
"epoch": 1.3574144486692015,
"grad_norm": 0.7110621808285221,
"learning_rate": 5.665437450875534e-06,
"loss": 0.2888,
"step": 714
},
{
"epoch": 1.3593155893536122,
"grad_norm": 0.7299173606699906,
"learning_rate": 5.635534097740435e-06,
"loss": 0.2973,
"step": 715
},
{
"epoch": 1.3612167300380227,
"grad_norm": 0.7176866773964024,
"learning_rate": 5.605678878186911e-06,
"loss": 0.2919,
"step": 716
},
{
"epoch": 1.3631178707224334,
"grad_norm": 0.6927444048761369,
"learning_rate": 5.575872121473722e-06,
"loss": 0.2808,
"step": 717
},
{
"epoch": 1.3650190114068441,
"grad_norm": 0.7556501736579121,
"learning_rate": 5.546114156325166e-06,
"loss": 0.2989,
"step": 718
},
{
"epoch": 1.3669201520912546,
"grad_norm": 0.7209560658812484,
"learning_rate": 5.516405310927431e-06,
"loss": 0.2926,
"step": 719
},
{
"epoch": 1.3688212927756653,
"grad_norm": 0.6583835531844443,
"learning_rate": 5.4867459129249846e-06,
"loss": 0.291,
"step": 720
},
{
"epoch": 1.370722433460076,
"grad_norm": 0.7185377611220743,
"learning_rate": 5.4571362894169795e-06,
"loss": 0.3043,
"step": 721
},
{
"epoch": 1.3726235741444868,
"grad_norm": 0.7181679662678484,
"learning_rate": 5.427576766953615e-06,
"loss": 0.288,
"step": 722
},
{
"epoch": 1.3745247148288973,
"grad_norm": 0.7832777717044008,
"learning_rate": 5.398067671532554e-06,
"loss": 0.2939,
"step": 723
},
{
"epoch": 1.376425855513308,
"grad_norm": 0.6876824566014526,
"learning_rate": 5.368609328595323e-06,
"loss": 0.2872,
"step": 724
},
{
"epoch": 1.3783269961977187,
"grad_norm": 0.7268056251485421,
"learning_rate": 5.339202063023727e-06,
"loss": 0.2837,
"step": 725
},
{
"epoch": 1.3802281368821292,
"grad_norm": 0.7051203597873101,
"learning_rate": 5.309846199136258e-06,
"loss": 0.2924,
"step": 726
},
{
"epoch": 1.38212927756654,
"grad_norm": 0.6897073475256568,
"learning_rate": 5.280542060684535e-06,
"loss": 0.2949,
"step": 727
},
{
"epoch": 1.3840304182509506,
"grad_norm": 0.7041367838386796,
"learning_rate": 5.2512899708497086e-06,
"loss": 0.2932,
"step": 728
},
{
"epoch": 1.3859315589353614,
"grad_norm": 0.6683264477473839,
"learning_rate": 5.222090252238916e-06,
"loss": 0.2877,
"step": 729
},
{
"epoch": 1.3878326996197718,
"grad_norm": 0.7221607927209864,
"learning_rate": 5.192943226881724e-06,
"loss": 0.2836,
"step": 730
},
{
"epoch": 1.3897338403041826,
"grad_norm": 0.7515054015377011,
"learning_rate": 5.163849216226562e-06,
"loss": 0.2997,
"step": 731
},
{
"epoch": 1.3916349809885933,
"grad_norm": 0.6909291421778516,
"learning_rate": 5.134808541137183e-06,
"loss": 0.2982,
"step": 732
},
{
"epoch": 1.3935361216730038,
"grad_norm": 0.7232476998468705,
"learning_rate": 5.105821521889147e-06,
"loss": 0.3016,
"step": 733
},
{
"epoch": 1.3954372623574145,
"grad_norm": 0.6807403846410478,
"learning_rate": 5.076888478166247e-06,
"loss": 0.3023,
"step": 734
},
{
"epoch": 1.3973384030418252,
"grad_norm": 0.7112753037026083,
"learning_rate": 5.048009729057012e-06,
"loss": 0.2984,
"step": 735
},
{
"epoch": 1.3992395437262357,
"grad_norm": 0.7366937434443529,
"learning_rate": 5.0191855930511946e-06,
"loss": 0.2968,
"step": 736
},
{
"epoch": 1.4011406844106464,
"grad_norm": 0.6689155291762198,
"learning_rate": 4.990416388036233e-06,
"loss": 0.2887,
"step": 737
},
{
"epoch": 1.4030418250950571,
"grad_norm": 0.7081360303140702,
"learning_rate": 4.961702431293759e-06,
"loss": 0.2993,
"step": 738
},
{
"epoch": 1.4049429657794676,
"grad_norm": 0.729160975968536,
"learning_rate": 4.933044039496107e-06,
"loss": 0.3002,
"step": 739
},
{
"epoch": 1.4068441064638784,
"grad_norm": 0.6623672708731712,
"learning_rate": 4.904441528702806e-06,
"loss": 0.2988,
"step": 740
},
{
"epoch": 1.408745247148289,
"grad_norm": 0.7105539677537536,
"learning_rate": 4.875895214357093e-06,
"loss": 0.3005,
"step": 741
},
{
"epoch": 1.4106463878326996,
"grad_norm": 0.7155687428289734,
"learning_rate": 4.847405411282462e-06,
"loss": 0.2994,
"step": 742
},
{
"epoch": 1.4125475285171103,
"grad_norm": 0.6904293125562719,
"learning_rate": 4.818972433679145e-06,
"loss": 0.2881,
"step": 743
},
{
"epoch": 1.414448669201521,
"grad_norm": 0.6894502446356497,
"learning_rate": 4.790596595120699e-06,
"loss": 0.2869,
"step": 744
},
{
"epoch": 1.4163498098859315,
"grad_norm": 0.6529449266209777,
"learning_rate": 4.762278208550505e-06,
"loss": 0.272,
"step": 745
},
{
"epoch": 1.4182509505703422,
"grad_norm": 0.6595852728782041,
"learning_rate": 4.734017586278337e-06,
"loss": 0.2833,
"step": 746
},
{
"epoch": 1.420152091254753,
"grad_norm": 0.7105883785087419,
"learning_rate": 4.7058150399769245e-06,
"loss": 0.3014,
"step": 747
},
{
"epoch": 1.4220532319391634,
"grad_norm": 0.7085464036420024,
"learning_rate": 4.677670880678493e-06,
"loss": 0.3003,
"step": 748
},
{
"epoch": 1.4239543726235742,
"grad_norm": 0.6867378944737853,
"learning_rate": 4.649585418771348e-06,
"loss": 0.2893,
"step": 749
},
{
"epoch": 1.4258555133079849,
"grad_norm": 0.7018083827754964,
"learning_rate": 4.621558963996458e-06,
"loss": 0.2997,
"step": 750
},
{
"epoch": 1.4277566539923954,
"grad_norm": 0.717364745046398,
"learning_rate": 4.593591825444028e-06,
"loss": 0.2888,
"step": 751
},
{
"epoch": 1.429657794676806,
"grad_norm": 0.7145207910594493,
"learning_rate": 4.565684311550077e-06,
"loss": 0.2891,
"step": 752
},
{
"epoch": 1.4315589353612168,
"grad_norm": 0.7433098171832778,
"learning_rate": 4.537836730093077e-06,
"loss": 0.3052,
"step": 753
},
{
"epoch": 1.4334600760456273,
"grad_norm": 0.6828309050336888,
"learning_rate": 4.510049388190518e-06,
"loss": 0.2873,
"step": 754
},
{
"epoch": 1.435361216730038,
"grad_norm": 0.6966456031323172,
"learning_rate": 4.482322592295541e-06,
"loss": 0.2846,
"step": 755
},
{
"epoch": 1.4372623574144487,
"grad_norm": 0.7499011956214495,
"learning_rate": 4.454656648193559e-06,
"loss": 0.2884,
"step": 756
},
{
"epoch": 1.4391634980988592,
"grad_norm": 0.6817497895662598,
"learning_rate": 4.427051860998877e-06,
"loss": 0.2889,
"step": 757
},
{
"epoch": 1.44106463878327,
"grad_norm": 0.6751995560534841,
"learning_rate": 4.399508535151321e-06,
"loss": 0.2756,
"step": 758
},
{
"epoch": 1.4429657794676807,
"grad_norm": 0.6973859817402791,
"learning_rate": 4.372026974412907e-06,
"loss": 0.2841,
"step": 759
},
{
"epoch": 1.4448669201520912,
"grad_norm": 0.7197459602824847,
"learning_rate": 4.344607481864466e-06,
"loss": 0.2998,
"step": 760
},
{
"epoch": 1.446768060836502,
"grad_norm": 0.7032017905329768,
"learning_rate": 4.317250359902295e-06,
"loss": 0.2968,
"step": 761
},
{
"epoch": 1.4486692015209126,
"grad_norm": 0.6965077720453614,
"learning_rate": 4.2899559102348585e-06,
"loss": 0.2963,
"step": 762
},
{
"epoch": 1.450570342205323,
"grad_norm": 0.7266756042905513,
"learning_rate": 4.262724433879427e-06,
"loss": 0.2924,
"step": 763
},
{
"epoch": 1.4524714828897338,
"grad_norm": 0.7310607985577439,
"learning_rate": 4.235556231158765e-06,
"loss": 0.2889,
"step": 764
},
{
"epoch": 1.4543726235741445,
"grad_norm": 0.7248390846557323,
"learning_rate": 4.208451601697836e-06,
"loss": 0.2854,
"step": 765
},
{
"epoch": 1.456273764258555,
"grad_norm": 0.6949054691238198,
"learning_rate": 4.181410844420473e-06,
"loss": 0.2915,
"step": 766
},
{
"epoch": 1.4581749049429658,
"grad_norm": 0.6624350442217246,
"learning_rate": 4.154434257546095e-06,
"loss": 0.2805,
"step": 767
},
{
"epoch": 1.4600760456273765,
"grad_norm": 0.6775056869337501,
"learning_rate": 4.127522138586424e-06,
"loss": 0.2853,
"step": 768
},
{
"epoch": 1.461977186311787,
"grad_norm": 0.7283906009371305,
"learning_rate": 4.10067478434219e-06,
"loss": 0.3035,
"step": 769
},
{
"epoch": 1.4638783269961977,
"grad_norm": 0.6820545513864502,
"learning_rate": 4.073892490899865e-06,
"loss": 0.2872,
"step": 770
},
{
"epoch": 1.4657794676806084,
"grad_norm": 0.667857080293407,
"learning_rate": 4.047175553628397e-06,
"loss": 0.2853,
"step": 771
},
{
"epoch": 1.467680608365019,
"grad_norm": 0.6928471555307516,
"learning_rate": 4.020524267175954e-06,
"loss": 0.2824,
"step": 772
},
{
"epoch": 1.4695817490494296,
"grad_norm": 0.6738015156937467,
"learning_rate": 3.993938925466674e-06,
"loss": 0.2855,
"step": 773
},
{
"epoch": 1.4714828897338403,
"grad_norm": 0.7007674474534115,
"learning_rate": 3.96741982169742e-06,
"loss": 0.2902,
"step": 774
},
{
"epoch": 1.4733840304182508,
"grad_norm": 0.7292801696433047,
"learning_rate": 3.9409672483345465e-06,
"loss": 0.2902,
"step": 775
},
{
"epoch": 1.4752851711026616,
"grad_norm": 0.7278624506697899,
"learning_rate": 3.914581497110684e-06,
"loss": 0.2943,
"step": 776
},
{
"epoch": 1.4771863117870723,
"grad_norm": 0.7050973741374458,
"learning_rate": 3.888262859021508e-06,
"loss": 0.2851,
"step": 777
},
{
"epoch": 1.4790874524714828,
"grad_norm": 0.6742410956934353,
"learning_rate": 3.862011624322534e-06,
"loss": 0.2919,
"step": 778
},
{
"epoch": 1.4809885931558935,
"grad_norm": 0.7232482349457217,
"learning_rate": 3.835828082525925e-06,
"loss": 0.2943,
"step": 779
},
{
"epoch": 1.4828897338403042,
"grad_norm": 0.6994521685626667,
"learning_rate": 3.8097125223972864e-06,
"loss": 0.2926,
"step": 780
},
{
"epoch": 1.4847908745247147,
"grad_norm": 0.7042214770667615,
"learning_rate": 3.7836652319524835e-06,
"loss": 0.2841,
"step": 781
},
{
"epoch": 1.4866920152091254,
"grad_norm": 0.6967658416270236,
"learning_rate": 3.7576864984544814e-06,
"loss": 0.2875,
"step": 782
},
{
"epoch": 1.4885931558935361,
"grad_norm": 0.700973359960345,
"learning_rate": 3.73177660841015e-06,
"loss": 0.3129,
"step": 783
},
{
"epoch": 1.4904942965779466,
"grad_norm": 0.6989781078686236,
"learning_rate": 3.7059358475671225e-06,
"loss": 0.292,
"step": 784
},
{
"epoch": 1.4923954372623573,
"grad_norm": 0.6745352865527052,
"learning_rate": 3.680164500910646e-06,
"loss": 0.2836,
"step": 785
},
{
"epoch": 1.494296577946768,
"grad_norm": 0.6648954954406044,
"learning_rate": 3.654462852660423e-06,
"loss": 0.3077,
"step": 786
},
{
"epoch": 1.4961977186311788,
"grad_norm": 0.6798713308395538,
"learning_rate": 3.6288311862674885e-06,
"loss": 0.2942,
"step": 787
},
{
"epoch": 1.4980988593155893,
"grad_norm": 0.7055236284857903,
"learning_rate": 3.6032697844110896e-06,
"loss": 0.3038,
"step": 788
},
{
"epoch": 1.5,
"grad_norm": 0.6455644079660416,
"learning_rate": 3.5777789289955454e-06,
"loss": 0.2862,
"step": 789
},
{
"epoch": 1.5019011406844105,
"grad_norm": 0.6485756988855925,
"learning_rate": 3.5523589011471592e-06,
"loss": 0.2812,
"step": 790
},
{
"epoch": 1.5038022813688214,
"grad_norm": 0.7098930942258047,
"learning_rate": 3.527009981211119e-06,
"loss": 0.3017,
"step": 791
},
{
"epoch": 1.505703422053232,
"grad_norm": 0.6959088123661091,
"learning_rate": 3.5017324487483873e-06,
"loss": 0.2864,
"step": 792
},
{
"epoch": 1.5076045627376424,
"grad_norm": 0.7096042369410857,
"learning_rate": 3.47652658253263e-06,
"loss": 0.2951,
"step": 793
},
{
"epoch": 1.5095057034220534,
"grad_norm": 0.7393005131603504,
"learning_rate": 3.4513926605471504e-06,
"loss": 0.2879,
"step": 794
},
{
"epoch": 1.5114068441064639,
"grad_norm": 0.6612220583440445,
"learning_rate": 3.4263309599818017e-06,
"loss": 0.2732,
"step": 795
},
{
"epoch": 1.5133079847908744,
"grad_norm": 0.6768406170360567,
"learning_rate": 3.4013417572299446e-06,
"loss": 0.3011,
"step": 796
},
{
"epoch": 1.5152091254752853,
"grad_norm": 0.6799332931558087,
"learning_rate": 3.37642532788541e-06,
"loss": 0.2882,
"step": 797
},
{
"epoch": 1.5171102661596958,
"grad_norm": 0.7215037107200758,
"learning_rate": 3.3515819467394184e-06,
"loss": 0.3008,
"step": 798
},
{
"epoch": 1.5190114068441065,
"grad_norm": 0.6935018156935899,
"learning_rate": 3.326811887777607e-06,
"loss": 0.3006,
"step": 799
},
{
"epoch": 1.5209125475285172,
"grad_norm": 0.7089750689283194,
"learning_rate": 3.3021154241769606e-06,
"loss": 0.2908,
"step": 800
},
{
"epoch": 1.5228136882129277,
"grad_norm": 0.7186296978077891,
"learning_rate": 3.2774928283028153e-06,
"loss": 0.2972,
"step": 801
},
{
"epoch": 1.5247148288973384,
"grad_norm": 0.7320606599145869,
"learning_rate": 3.2529443717058693e-06,
"loss": 0.2925,
"step": 802
},
{
"epoch": 1.5266159695817492,
"grad_norm": 0.7129696195067098,
"learning_rate": 3.228470325119164e-06,
"loss": 0.2833,
"step": 803
},
{
"epoch": 1.5285171102661597,
"grad_norm": 0.6743563898293771,
"learning_rate": 3.20407095845511e-06,
"loss": 0.2817,
"step": 804
},
{
"epoch": 1.5304182509505704,
"grad_norm": 0.762821347870341,
"learning_rate": 3.179746540802506e-06,
"loss": 0.2998,
"step": 805
},
{
"epoch": 1.532319391634981,
"grad_norm": 0.7123455686269954,
"learning_rate": 3.155497340423588e-06,
"loss": 0.2915,
"step": 806
},
{
"epoch": 1.5342205323193916,
"grad_norm": 0.7364899992790835,
"learning_rate": 3.1313236247510414e-06,
"loss": 0.2861,
"step": 807
},
{
"epoch": 1.5361216730038023,
"grad_norm": 0.7665411358091122,
"learning_rate": 3.107225660385077e-06,
"loss": 0.3061,
"step": 808
},
{
"epoch": 1.538022813688213,
"grad_norm": 0.7046683139563734,
"learning_rate": 3.0832037130904748e-06,
"loss": 0.2915,
"step": 809
},
{
"epoch": 1.5399239543726235,
"grad_norm": 0.6996705909700672,
"learning_rate": 3.0592580477936606e-06,
"loss": 0.2881,
"step": 810
},
{
"epoch": 1.5418250950570342,
"grad_norm": 0.6906854505319303,
"learning_rate": 3.035388928579792e-06,
"loss": 0.3026,
"step": 811
},
{
"epoch": 1.543726235741445,
"grad_norm": 0.6711356643204208,
"learning_rate": 3.011596618689825e-06,
"loss": 0.2913,
"step": 812
},
{
"epoch": 1.5456273764258555,
"grad_norm": 0.6975768122002354,
"learning_rate": 2.9878813805176252e-06,
"loss": 0.2727,
"step": 813
},
{
"epoch": 1.5475285171102662,
"grad_norm": 0.6853434738758215,
"learning_rate": 2.9642434756070793e-06,
"loss": 0.2871,
"step": 814
},
{
"epoch": 1.549429657794677,
"grad_norm": 0.6992851752016862,
"learning_rate": 2.940683164649194e-06,
"loss": 0.2915,
"step": 815
},
{
"epoch": 1.5513307984790874,
"grad_norm": 0.6947756057981619,
"learning_rate": 2.9172007074792342e-06,
"loss": 0.2834,
"step": 816
},
{
"epoch": 1.553231939163498,
"grad_norm": 0.694978982145842,
"learning_rate": 2.8937963630738517e-06,
"loss": 0.2824,
"step": 817
},
{
"epoch": 1.5551330798479088,
"grad_norm": 0.7281370922331171,
"learning_rate": 2.87047038954823e-06,
"loss": 0.2949,
"step": 818
},
{
"epoch": 1.5570342205323193,
"grad_norm": 0.6919233417351244,
"learning_rate": 2.8472230441532365e-06,
"loss": 0.2885,
"step": 819
},
{
"epoch": 1.55893536121673,
"grad_norm": 0.6689009340157299,
"learning_rate": 2.8240545832725963e-06,
"loss": 0.2893,
"step": 820
},
{
"epoch": 1.5608365019011408,
"grad_norm": 0.6747723266242025,
"learning_rate": 2.8009652624200436e-06,
"loss": 0.2928,
"step": 821
},
{
"epoch": 1.5627376425855513,
"grad_norm": 0.6624778265291289,
"learning_rate": 2.7779553362365184e-06,
"loss": 0.2765,
"step": 822
},
{
"epoch": 1.564638783269962,
"grad_norm": 0.7505639301396252,
"learning_rate": 2.755025058487364e-06,
"loss": 0.3019,
"step": 823
},
{
"epoch": 1.5665399239543727,
"grad_norm": 0.6943147988828117,
"learning_rate": 2.7321746820595084e-06,
"loss": 0.2844,
"step": 824
},
{
"epoch": 1.5684410646387832,
"grad_norm": 0.7661225612927439,
"learning_rate": 2.709404458958693e-06,
"loss": 0.3026,
"step": 825
},
{
"epoch": 1.570342205323194,
"grad_norm": 0.6787396517681653,
"learning_rate": 2.6867146403066833e-06,
"loss": 0.2752,
"step": 826
},
{
"epoch": 1.5722433460076046,
"grad_norm": 0.6873874066697369,
"learning_rate": 2.6641054763385044e-06,
"loss": 0.2802,
"step": 827
},
{
"epoch": 1.5741444866920151,
"grad_norm": 0.7243929034218036,
"learning_rate": 2.6415772163996845e-06,
"loss": 0.2923,
"step": 828
},
{
"epoch": 1.5760456273764258,
"grad_norm": 0.7542225435988764,
"learning_rate": 2.619130108943494e-06,
"loss": 0.2955,
"step": 829
},
{
"epoch": 1.5779467680608366,
"grad_norm": 0.6545819016443233,
"learning_rate": 2.5967644015282146e-06,
"loss": 0.2677,
"step": 830
},
{
"epoch": 1.579847908745247,
"grad_norm": 0.6892785102869334,
"learning_rate": 2.5744803408144026e-06,
"loss": 0.286,
"step": 831
},
{
"epoch": 1.5817490494296578,
"grad_norm": 0.7085683799028424,
"learning_rate": 2.5522781725621814e-06,
"loss": 0.302,
"step": 832
},
{
"epoch": 1.5836501901140685,
"grad_norm": 0.7043790525673234,
"learning_rate": 2.530158141628515e-06,
"loss": 0.2796,
"step": 833
},
{
"epoch": 1.585551330798479,
"grad_norm": 0.7094589839020906,
"learning_rate": 2.508120491964512e-06,
"loss": 0.2967,
"step": 834
},
{
"epoch": 1.5874524714828897,
"grad_norm": 0.6870200740109677,
"learning_rate": 2.486165466612751e-06,
"loss": 0.2882,
"step": 835
},
{
"epoch": 1.5893536121673004,
"grad_norm": 0.6956609181806732,
"learning_rate": 2.464293307704566e-06,
"loss": 0.2882,
"step": 836
},
{
"epoch": 1.591254752851711,
"grad_norm": 0.7146411009968474,
"learning_rate": 2.4425042564574186e-06,
"loss": 0.2826,
"step": 837
},
{
"epoch": 1.5931558935361216,
"grad_norm": 0.6504253419967946,
"learning_rate": 2.4207985531722034e-06,
"loss": 0.281,
"step": 838
},
{
"epoch": 1.5950570342205324,
"grad_norm": 0.695678280790003,
"learning_rate": 2.3991764372306113e-06,
"loss": 0.2772,
"step": 839
},
{
"epoch": 1.5969581749049429,
"grad_norm": 0.6614351605549079,
"learning_rate": 2.377638147092497e-06,
"loss": 0.2653,
"step": 840
},
{
"epoch": 1.5988593155893536,
"grad_norm": 0.6755774907066655,
"learning_rate": 2.3561839202932344e-06,
"loss": 0.2896,
"step": 841
},
{
"epoch": 1.6007604562737643,
"grad_norm": 0.6956930233456882,
"learning_rate": 2.3348139934411008e-06,
"loss": 0.2758,
"step": 842
},
{
"epoch": 1.6026615969581748,
"grad_norm": 0.6741150827690423,
"learning_rate": 2.3135286022146785e-06,
"loss": 0.2888,
"step": 843
},
{
"epoch": 1.6045627376425855,
"grad_norm": 0.6316895047380994,
"learning_rate": 2.292327981360245e-06,
"loss": 0.2638,
"step": 844
},
{
"epoch": 1.6064638783269962,
"grad_norm": 0.6992821603762209,
"learning_rate": 2.271212364689176e-06,
"loss": 0.2865,
"step": 845
},
{
"epoch": 1.6083650190114067,
"grad_norm": 0.7162860982571969,
"learning_rate": 2.2501819850753925e-06,
"loss": 0.3053,
"step": 846
},
{
"epoch": 1.6102661596958177,
"grad_norm": 0.7207324873475008,
"learning_rate": 2.229237074452768e-06,
"loss": 0.2966,
"step": 847
},
{
"epoch": 1.6121673003802282,
"grad_norm": 0.6630457368725939,
"learning_rate": 2.2083778638125796e-06,
"loss": 0.2839,
"step": 848
},
{
"epoch": 1.6140684410646386,
"grad_norm": 0.6604787149642914,
"learning_rate": 2.1876045832009694e-06,
"loss": 0.2657,
"step": 849
},
{
"epoch": 1.6159695817490496,
"grad_norm": 0.681220836719113,
"learning_rate": 2.16691746171639e-06,
"loss": 0.2825,
"step": 850
},
{
"epoch": 1.61787072243346,
"grad_norm": 0.660161185972932,
"learning_rate": 2.1463167275070863e-06,
"loss": 0.2952,
"step": 851
},
{
"epoch": 1.6197718631178706,
"grad_norm": 0.6920853928643931,
"learning_rate": 2.125802607768588e-06,
"loss": 0.2684,
"step": 852
},
{
"epoch": 1.6216730038022815,
"grad_norm": 0.7401560941732488,
"learning_rate": 2.1053753287411895e-06,
"loss": 0.2763,
"step": 853
},
{
"epoch": 1.623574144486692,
"grad_norm": 0.6995007969340379,
"learning_rate": 2.08503511570746e-06,
"loss": 0.2807,
"step": 854
},
{
"epoch": 1.6254752851711025,
"grad_norm": 0.6805054843155047,
"learning_rate": 2.064782192989765e-06,
"loss": 0.2804,
"step": 855
},
{
"epoch": 1.6273764258555135,
"grad_norm": 0.6644525198905932,
"learning_rate": 2.0446167839477815e-06,
"loss": 0.2761,
"step": 856
},
{
"epoch": 1.629277566539924,
"grad_norm": 0.6870250462874686,
"learning_rate": 2.0245391109760437e-06,
"loss": 0.297,
"step": 857
},
{
"epoch": 1.6311787072243344,
"grad_norm": 0.6786371224598402,
"learning_rate": 2.0045493955014915e-06,
"loss": 0.2773,
"step": 858
},
{
"epoch": 1.6330798479087454,
"grad_norm": 0.6707049785918812,
"learning_rate": 1.984647857981017e-06,
"loss": 0.2956,
"step": 859
},
{
"epoch": 1.6349809885931559,
"grad_norm": 0.6961056251857739,
"learning_rate": 1.96483471789904e-06,
"loss": 0.2909,
"step": 860
},
{
"epoch": 1.6368821292775664,
"grad_norm": 0.6699006104096554,
"learning_rate": 1.9451101937650963e-06,
"loss": 0.2862,
"step": 861
},
{
"epoch": 1.6387832699619773,
"grad_norm": 0.687816943547927,
"learning_rate": 1.925474503111412e-06,
"loss": 0.2905,
"step": 862
},
{
"epoch": 1.6406844106463878,
"grad_norm": 0.714121698692748,
"learning_rate": 1.905927862490512e-06,
"loss": 0.304,
"step": 863
},
{
"epoch": 1.6425855513307985,
"grad_norm": 0.6760944562898356,
"learning_rate": 1.8864704874728346e-06,
"loss": 0.2754,
"step": 864
},
{
"epoch": 1.6444866920152093,
"grad_norm": 0.709949536585649,
"learning_rate": 1.8671025926443464e-06,
"loss": 0.2976,
"step": 865
},
{
"epoch": 1.6463878326996197,
"grad_norm": 0.6736618086418665,
"learning_rate": 1.8478243916041882e-06,
"loss": 0.2838,
"step": 866
},
{
"epoch": 1.6482889733840305,
"grad_norm": 0.6894888200015823,
"learning_rate": 1.828636096962304e-06,
"loss": 0.2809,
"step": 867
},
{
"epoch": 1.6501901140684412,
"grad_norm": 0.7201040108842734,
"learning_rate": 1.8095379203371044e-06,
"loss": 0.2847,
"step": 868
},
{
"epoch": 1.6520912547528517,
"grad_norm": 0.6697327152375478,
"learning_rate": 1.7905300723531393e-06,
"loss": 0.2836,
"step": 869
},
{
"epoch": 1.6539923954372624,
"grad_norm": 0.6979744594375921,
"learning_rate": 1.771612762638758e-06,
"loss": 0.287,
"step": 870
},
{
"epoch": 1.6558935361216731,
"grad_norm": 0.7094870773214628,
"learning_rate": 1.7527861998238094e-06,
"loss": 0.2886,
"step": 871
},
{
"epoch": 1.6577946768060836,
"grad_norm": 0.729131987438983,
"learning_rate": 1.7340505915373495e-06,
"loss": 0.2802,
"step": 872
},
{
"epoch": 1.6596958174904943,
"grad_norm": 0.714366655072255,
"learning_rate": 1.7154061444053239e-06,
"loss": 0.3,
"step": 873
},
{
"epoch": 1.661596958174905,
"grad_norm": 0.6863281379758543,
"learning_rate": 1.6968530640483126e-06,
"loss": 0.2792,
"step": 874
},
{
"epoch": 1.6634980988593155,
"grad_norm": 0.7004287445784028,
"learning_rate": 1.6783915550792652e-06,
"loss": 0.2864,
"step": 875
},
{
"epoch": 1.6653992395437263,
"grad_norm": 0.6824854626203597,
"learning_rate": 1.660021821101222e-06,
"loss": 0.2892,
"step": 876
},
{
"epoch": 1.667300380228137,
"grad_norm": 0.658577991448895,
"learning_rate": 1.6417440647050853e-06,
"loss": 0.2784,
"step": 877
},
{
"epoch": 1.6692015209125475,
"grad_norm": 0.680033365410524,
"learning_rate": 1.6235584874673848e-06,
"loss": 0.2826,
"step": 878
},
{
"epoch": 1.6711026615969582,
"grad_norm": 0.6772368824497229,
"learning_rate": 1.6054652899480472e-06,
"loss": 0.2639,
"step": 879
},
{
"epoch": 1.673003802281369,
"grad_norm": 0.6332497564388321,
"learning_rate": 1.587464671688187e-06,
"loss": 0.2606,
"step": 880
},
{
"epoch": 1.6749049429657794,
"grad_norm": 0.6653723609205437,
"learning_rate": 1.5695568312079156e-06,
"loss": 0.2843,
"step": 881
},
{
"epoch": 1.6768060836501901,
"grad_norm": 0.7069722392018514,
"learning_rate": 1.5517419660041277e-06,
"loss": 0.2793,
"step": 882
},
{
"epoch": 1.6787072243346008,
"grad_norm": 0.7067520056885543,
"learning_rate": 1.534020272548349e-06,
"loss": 0.2921,
"step": 883
},
{
"epoch": 1.6806083650190113,
"grad_norm": 0.7862721755456222,
"learning_rate": 1.5163919462845622e-06,
"loss": 0.297,
"step": 884
},
{
"epoch": 1.682509505703422,
"grad_norm": 0.698438633742708,
"learning_rate": 1.4988571816270402e-06,
"loss": 0.292,
"step": 885
},
{
"epoch": 1.6844106463878328,
"grad_norm": 0.7693434006934526,
"learning_rate": 1.4814161719582132e-06,
"loss": 0.2783,
"step": 886
},
{
"epoch": 1.6863117870722433,
"grad_norm": 0.7255816679465157,
"learning_rate": 1.4640691096265358e-06,
"loss": 0.3062,
"step": 887
},
{
"epoch": 1.688212927756654,
"grad_norm": 0.697571286514653,
"learning_rate": 1.4468161859443609e-06,
"loss": 0.2924,
"step": 888
},
{
"epoch": 1.6901140684410647,
"grad_norm": 0.6502769329581025,
"learning_rate": 1.4296575911858268e-06,
"loss": 0.2751,
"step": 889
},
{
"epoch": 1.6920152091254752,
"grad_norm": 0.6834173428292971,
"learning_rate": 1.412593514584777e-06,
"loss": 0.2933,
"step": 890
},
{
"epoch": 1.693916349809886,
"grad_norm": 0.7253152216154239,
"learning_rate": 1.3956241443326423e-06,
"loss": 0.2786,
"step": 891
},
{
"epoch": 1.6958174904942966,
"grad_norm": 0.719177407334314,
"learning_rate": 1.378749667576399e-06,
"loss": 0.2984,
"step": 892
},
{
"epoch": 1.6977186311787071,
"grad_norm": 0.6710453758618833,
"learning_rate": 1.3619702704164783e-06,
"loss": 0.2847,
"step": 893
},
{
"epoch": 1.6996197718631179,
"grad_norm": 0.7190489428088257,
"learning_rate": 1.3452861379047289e-06,
"loss": 0.2964,
"step": 894
},
{
"epoch": 1.7015209125475286,
"grad_norm": 0.6680997838054387,
"learning_rate": 1.3286974540423747e-06,
"loss": 0.2862,
"step": 895
},
{
"epoch": 1.703422053231939,
"grad_norm": 0.6915979036556366,
"learning_rate": 1.3122044017779768e-06,
"loss": 0.3016,
"step": 896
},
{
"epoch": 1.7053231939163498,
"grad_norm": 0.7061322770517322,
"learning_rate": 1.2958071630054214e-06,
"loss": 0.2919,
"step": 897
},
{
"epoch": 1.7072243346007605,
"grad_norm": 0.6980910970288429,
"learning_rate": 1.279505918561923e-06,
"loss": 0.3056,
"step": 898
},
{
"epoch": 1.709125475285171,
"grad_norm": 0.7242398699847349,
"learning_rate": 1.2633008482260146e-06,
"loss": 0.2931,
"step": 899
},
{
"epoch": 1.7110266159695817,
"grad_norm": 0.7090328155108881,
"learning_rate": 1.2471921307155655e-06,
"loss": 0.278,
"step": 900
},
{
"epoch": 1.7129277566539924,
"grad_norm": 0.6731231742943454,
"learning_rate": 1.2311799436858275e-06,
"loss": 0.2913,
"step": 901
},
{
"epoch": 1.714828897338403,
"grad_norm": 0.681487038246602,
"learning_rate": 1.2152644637274603e-06,
"loss": 0.2807,
"step": 902
},
{
"epoch": 1.7167300380228137,
"grad_norm": 0.6798140783412531,
"learning_rate": 1.1994458663645836e-06,
"loss": 0.2843,
"step": 903
},
{
"epoch": 1.7186311787072244,
"grad_norm": 0.7171628645002974,
"learning_rate": 1.1837243260528542e-06,
"loss": 0.2933,
"step": 904
},
{
"epoch": 1.7205323193916349,
"grad_norm": 0.6597518428337097,
"learning_rate": 1.168100016177528e-06,
"loss": 0.2815,
"step": 905
},
{
"epoch": 1.7224334600760456,
"grad_norm": 0.7324952892528912,
"learning_rate": 1.1525731090515536e-06,
"loss": 0.2869,
"step": 906
},
{
"epoch": 1.7243346007604563,
"grad_norm": 0.7041444866532199,
"learning_rate": 1.137143775913675e-06,
"loss": 0.2859,
"step": 907
},
{
"epoch": 1.7262357414448668,
"grad_norm": 0.7103061566252038,
"learning_rate": 1.1218121869265365e-06,
"loss": 0.2907,
"step": 908
},
{
"epoch": 1.7281368821292775,
"grad_norm": 0.6477835344116541,
"learning_rate": 1.1065785111748117e-06,
"loss": 0.27,
"step": 909
},
{
"epoch": 1.7300380228136882,
"grad_norm": 0.6772629013791996,
"learning_rate": 1.0914429166633355e-06,
"loss": 0.2823,
"step": 910
},
{
"epoch": 1.7319391634980987,
"grad_norm": 0.6959297770815,
"learning_rate": 1.076405570315252e-06,
"loss": 0.2952,
"step": 911
},
{
"epoch": 1.7338403041825095,
"grad_norm": 0.709273268007864,
"learning_rate": 1.0614666379701732e-06,
"loss": 0.2802,
"step": 912
},
{
"epoch": 1.7357414448669202,
"grad_norm": 0.6977418116654548,
"learning_rate": 1.046626284382356e-06,
"loss": 0.2867,
"step": 913
},
{
"epoch": 1.7376425855513307,
"grad_norm": 0.7167937759116931,
"learning_rate": 1.0318846732188737e-06,
"loss": 0.2878,
"step": 914
},
{
"epoch": 1.7395437262357416,
"grad_norm": 0.6513451239963968,
"learning_rate": 1.017241967057816e-06,
"loss": 0.2827,
"step": 915
},
{
"epoch": 1.741444866920152,
"grad_norm": 0.6698688465698794,
"learning_rate": 1.0026983273865055e-06,
"loss": 0.2843,
"step": 916
},
{
"epoch": 1.7433460076045626,
"grad_norm": 0.6638983417881938,
"learning_rate": 9.882539145997027e-07,
"loss": 0.2816,
"step": 917
},
{
"epoch": 1.7452471482889735,
"grad_norm": 0.6757033212968067,
"learning_rate": 9.739088879978409e-07,
"loss": 0.2878,
"step": 918
},
{
"epoch": 1.747148288973384,
"grad_norm": 0.6629838331971963,
"learning_rate": 9.59663405785277e-07,
"loss": 0.2691,
"step": 919
},
{
"epoch": 1.7490494296577945,
"grad_norm": 0.7001979526677,
"learning_rate": 9.455176250685338e-07,
"loss": 0.2778,
"step": 920
},
{
"epoch": 1.7509505703422055,
"grad_norm": 0.6802455293550186,
"learning_rate": 9.314717018545838e-07,
"loss": 0.2699,
"step": 921
},
{
"epoch": 1.752851711026616,
"grad_norm": 0.686264012966914,
"learning_rate": 9.17525791049112e-07,
"loss": 0.273,
"step": 922
},
{
"epoch": 1.7547528517110265,
"grad_norm": 0.7129153233843821,
"learning_rate": 9.036800464548157e-07,
"loss": 0.2743,
"step": 923
},
{
"epoch": 1.7566539923954374,
"grad_norm": 0.6897695776270255,
"learning_rate": 8.899346207697135e-07,
"loss": 0.2825,
"step": 924
},
{
"epoch": 1.758555133079848,
"grad_norm": 0.715846740155621,
"learning_rate": 8.762896655854481e-07,
"loss": 0.2992,
"step": 925
},
{
"epoch": 1.7604562737642584,
"grad_norm": 0.7158267450198432,
"learning_rate": 8.627453313856249e-07,
"loss": 0.2822,
"step": 926
},
{
"epoch": 1.7623574144486693,
"grad_norm": 0.6828997050391733,
"learning_rate": 8.493017675441495e-07,
"loss": 0.2795,
"step": 927
},
{
"epoch": 1.7642585551330798,
"grad_norm": 0.7476257310678405,
"learning_rate": 8.359591223235785e-07,
"loss": 0.2952,
"step": 928
},
{
"epoch": 1.7661596958174905,
"grad_norm": 0.727229626409356,
"learning_rate": 8.227175428734868e-07,
"loss": 0.2999,
"step": 929
},
{
"epoch": 1.7680608365019013,
"grad_norm": 0.6982437133675171,
"learning_rate": 8.095771752288451e-07,
"loss": 0.2791,
"step": 930
},
{
"epoch": 1.7699619771863118,
"grad_norm": 0.7185082089703975,
"learning_rate": 7.965381643084069e-07,
"loss": 0.2739,
"step": 931
},
{
"epoch": 1.7718631178707225,
"grad_norm": 0.7012339014029306,
"learning_rate": 7.83600653913108e-07,
"loss": 0.2978,
"step": 932
},
{
"epoch": 1.7737642585551332,
"grad_norm": 0.6718404130221965,
"learning_rate": 7.707647867244927e-07,
"loss": 0.286,
"step": 933
},
{
"epoch": 1.7756653992395437,
"grad_norm": 0.7200388988778087,
"learning_rate": 7.580307043031232e-07,
"loss": 0.291,
"step": 934
},
{
"epoch": 1.7775665399239544,
"grad_norm": 0.6975924639846894,
"learning_rate": 7.453985470870284e-07,
"loss": 0.2811,
"step": 935
},
{
"epoch": 1.7794676806083651,
"grad_norm": 0.719269513406665,
"learning_rate": 7.328684543901598e-07,
"loss": 0.2931,
"step": 936
},
{
"epoch": 1.7813688212927756,
"grad_norm": 0.6665010899203128,
"learning_rate": 7.204405644008416e-07,
"loss": 0.2762,
"step": 937
},
{
"epoch": 1.7832699619771863,
"grad_norm": 0.6842680593973314,
"learning_rate": 7.081150141802518e-07,
"loss": 0.2926,
"step": 938
},
{
"epoch": 1.785171102661597,
"grad_norm": 0.6437101762797967,
"learning_rate": 6.958919396609231e-07,
"loss": 0.2812,
"step": 939
},
{
"epoch": 1.7870722433460076,
"grad_norm": 0.680020989039017,
"learning_rate": 6.837714756452241e-07,
"loss": 0.2854,
"step": 940
},
{
"epoch": 1.7889733840304183,
"grad_norm": 0.6707449769777548,
"learning_rate": 6.717537558038845e-07,
"loss": 0.2834,
"step": 941
},
{
"epoch": 1.790874524714829,
"grad_norm": 0.7022743815937758,
"learning_rate": 6.598389126745209e-07,
"loss": 0.3019,
"step": 942
},
{
"epoch": 1.7927756653992395,
"grad_norm": 0.7048640621655783,
"learning_rate": 6.480270776601682e-07,
"loss": 0.2867,
"step": 943
},
{
"epoch": 1.7946768060836502,
"grad_norm": 0.6464869131104212,
"learning_rate": 6.36318381027835e-07,
"loss": 0.2774,
"step": 944
},
{
"epoch": 1.796577946768061,
"grad_norm": 0.6752630905318655,
"learning_rate": 6.247129519070728e-07,
"loss": 0.2672,
"step": 945
},
{
"epoch": 1.7984790874524714,
"grad_norm": 0.6845966297204927,
"learning_rate": 6.132109182885382e-07,
"loss": 0.2864,
"step": 946
},
{
"epoch": 1.8003802281368821,
"grad_norm": 0.6675139547191826,
"learning_rate": 6.018124070225928e-07,
"loss": 0.2738,
"step": 947
},
{
"epoch": 1.8022813688212929,
"grad_norm": 0.6349755582443027,
"learning_rate": 5.905175438178979e-07,
"loss": 0.2785,
"step": 948
},
{
"epoch": 1.8041825095057034,
"grad_norm": 0.676103350403797,
"learning_rate": 5.793264532400311e-07,
"loss": 0.2878,
"step": 949
},
{
"epoch": 1.806083650190114,
"grad_norm": 0.6777862478954594,
"learning_rate": 5.68239258710116e-07,
"loss": 0.2835,
"step": 950
},
{
"epoch": 1.8079847908745248,
"grad_norm": 0.6737740718249264,
"learning_rate": 5.572560825034523e-07,
"loss": 0.2732,
"step": 951
},
{
"epoch": 1.8098859315589353,
"grad_norm": 0.7150219168433349,
"learning_rate": 5.463770457481732e-07,
"loss": 0.2943,
"step": 952
},
{
"epoch": 1.811787072243346,
"grad_norm": 0.6612056690361954,
"learning_rate": 5.35602268423906e-07,
"loss": 0.2921,
"step": 953
},
{
"epoch": 1.8136882129277567,
"grad_norm": 0.732860163904896,
"learning_rate": 5.249318693604577e-07,
"loss": 0.287,
"step": 954
},
{
"epoch": 1.8155893536121672,
"grad_norm": 0.7246410761157274,
"learning_rate": 5.143659662364931e-07,
"loss": 0.3046,
"step": 955
},
{
"epoch": 1.817490494296578,
"grad_norm": 0.667956659408272,
"learning_rate": 5.039046755782417e-07,
"loss": 0.2794,
"step": 956
},
{
"epoch": 1.8193916349809887,
"grad_norm": 0.6615101759235585,
"learning_rate": 4.935481127582131e-07,
"loss": 0.2726,
"step": 957
},
{
"epoch": 1.8212927756653992,
"grad_norm": 0.7094703181147476,
"learning_rate": 4.83296391993926e-07,
"loss": 0.2991,
"step": 958
},
{
"epoch": 1.8231939163498099,
"grad_norm": 0.6600531582033483,
"learning_rate": 4.7314962634664616e-07,
"loss": 0.2745,
"step": 959
},
{
"epoch": 1.8250950570342206,
"grad_norm": 0.646097973376974,
"learning_rate": 4.631079277201389e-07,
"loss": 0.281,
"step": 960
},
{
"epoch": 1.826996197718631,
"grad_norm": 0.6613503186308753,
"learning_rate": 4.5317140685943726e-07,
"loss": 0.2789,
"step": 961
},
{
"epoch": 1.8288973384030418,
"grad_norm": 0.6779042750343954,
"learning_rate": 4.433401733496201e-07,
"loss": 0.2831,
"step": 962
},
{
"epoch": 1.8307984790874525,
"grad_norm": 0.7368311685727883,
"learning_rate": 4.3361433561460274e-07,
"loss": 0.3063,
"step": 963
},
{
"epoch": 1.832699619771863,
"grad_norm": 0.6846583066565738,
"learning_rate": 4.2399400091594154e-07,
"loss": 0.28,
"step": 964
},
{
"epoch": 1.8346007604562737,
"grad_norm": 0.6568283106761658,
"learning_rate": 4.14479275351648e-07,
"loss": 0.2661,
"step": 965
},
{
"epoch": 1.8365019011406845,
"grad_norm": 0.6692694139205372,
"learning_rate": 4.0507026385502747e-07,
"loss": 0.2765,
"step": 966
},
{
"epoch": 1.838403041825095,
"grad_norm": 0.6814697534975698,
"learning_rate": 3.9576707019350903e-07,
"loss": 0.2908,
"step": 967
},
{
"epoch": 1.8403041825095057,
"grad_norm": 0.7387862304392728,
"learning_rate": 3.865697969675164e-07,
"loss": 0.2957,
"step": 968
},
{
"epoch": 1.8422053231939164,
"grad_norm": 0.6722846166170809,
"learning_rate": 3.7747854560931996e-07,
"loss": 0.2788,
"step": 969
},
{
"epoch": 1.8441064638783269,
"grad_norm": 0.6685332505331593,
"learning_rate": 3.684934163819309e-07,
"loss": 0.28,
"step": 970
},
{
"epoch": 1.8460076045627376,
"grad_norm": 0.6500226258000547,
"learning_rate": 3.596145083779912e-07,
"loss": 0.2757,
"step": 971
},
{
"epoch": 1.8479087452471483,
"grad_norm": 0.6993372392672914,
"learning_rate": 3.508419195186774e-07,
"loss": 0.2685,
"step": 972
},
{
"epoch": 1.8498098859315588,
"grad_norm": 0.6622438875705294,
"learning_rate": 3.421757465526243e-07,
"loss": 0.2759,
"step": 973
},
{
"epoch": 1.8517110266159695,
"grad_norm": 0.6929211329818729,
"learning_rate": 3.33616085054862e-07,
"loss": 0.278,
"step": 974
},
{
"epoch": 1.8536121673003803,
"grad_norm": 0.6727343076200936,
"learning_rate": 3.2516302942574794e-07,
"loss": 0.2885,
"step": 975
},
{
"epoch": 1.8555133079847907,
"grad_norm": 0.7161273496096213,
"learning_rate": 3.1681667288994353e-07,
"loss": 0.2898,
"step": 976
},
{
"epoch": 1.8574144486692015,
"grad_norm": 0.6923413261617984,
"learning_rate": 3.0857710749537585e-07,
"loss": 0.2783,
"step": 977
},
{
"epoch": 1.8593155893536122,
"grad_norm": 0.6856206153720831,
"learning_rate": 3.0044442411222066e-07,
"loss": 0.2687,
"step": 978
},
{
"epoch": 1.8612167300380227,
"grad_norm": 0.6620593176401336,
"learning_rate": 2.9241871243190555e-07,
"loss": 0.2785,
"step": 979
},
{
"epoch": 1.8631178707224336,
"grad_norm": 0.6716196846149549,
"learning_rate": 2.845000609661208e-07,
"loss": 0.2757,
"step": 980
},
{
"epoch": 1.8650190114068441,
"grad_norm": 0.6556669528713602,
"learning_rate": 2.7668855704583997e-07,
"loss": 0.2743,
"step": 981
},
{
"epoch": 1.8669201520912546,
"grad_norm": 0.671385849078519,
"learning_rate": 2.689842868203563e-07,
"loss": 0.278,
"step": 982
},
{
"epoch": 1.8688212927756656,
"grad_norm": 0.7019022371875026,
"learning_rate": 2.6138733525633896e-07,
"loss": 0.2757,
"step": 983
},
{
"epoch": 1.870722433460076,
"grad_norm": 0.7407847221589642,
"learning_rate": 2.5389778613688744e-07,
"loss": 0.2868,
"step": 984
},
{
"epoch": 1.8726235741444865,
"grad_norm": 0.6928374240370797,
"learning_rate": 2.46515722060614e-07,
"loss": 0.2901,
"step": 985
},
{
"epoch": 1.8745247148288975,
"grad_norm": 0.6848142171572185,
"learning_rate": 2.392412244407294e-07,
"loss": 0.259,
"step": 986
},
{
"epoch": 1.876425855513308,
"grad_norm": 0.6818479329508684,
"learning_rate": 2.3207437350414418e-07,
"loss": 0.2833,
"step": 987
},
{
"epoch": 1.8783269961977185,
"grad_norm": 0.7002114333204681,
"learning_rate": 2.2501524829059208e-07,
"loss": 0.2882,
"step": 988
},
{
"epoch": 1.8802281368821294,
"grad_norm": 0.7492765210332142,
"learning_rate": 2.180639266517448e-07,
"loss": 0.2869,
"step": 989
},
{
"epoch": 1.88212927756654,
"grad_norm": 0.7240229850965966,
"learning_rate": 2.1122048525036409e-07,
"loss": 0.289,
"step": 990
},
{
"epoch": 1.8840304182509504,
"grad_norm": 0.6907575207384802,
"learning_rate": 2.0448499955945223e-07,
"loss": 0.2867,
"step": 991
},
{
"epoch": 1.8859315589353614,
"grad_norm": 0.7354521787660664,
"learning_rate": 1.9785754386142164e-07,
"loss": 0.2832,
"step": 992
},
{
"epoch": 1.8878326996197718,
"grad_norm": 0.6756070001728416,
"learning_rate": 1.9133819124727003e-07,
"loss": 0.2814,
"step": 993
},
{
"epoch": 1.8897338403041823,
"grad_norm": 0.6981500188141937,
"learning_rate": 1.8492701361578326e-07,
"loss": 0.2774,
"step": 994
},
{
"epoch": 1.8916349809885933,
"grad_norm": 0.6711274518007815,
"learning_rate": 1.7862408167273472e-07,
"loss": 0.2867,
"step": 995
},
{
"epoch": 1.8935361216730038,
"grad_norm": 0.6962920670492144,
"learning_rate": 1.724294649301095e-07,
"loss": 0.2883,
"step": 996
},
{
"epoch": 1.8954372623574145,
"grad_norm": 0.7141221137782582,
"learning_rate": 1.6634323170533928e-07,
"loss": 0.2905,
"step": 997
},
{
"epoch": 1.8973384030418252,
"grad_norm": 0.663065725009529,
"learning_rate": 1.6036544912054087e-07,
"loss": 0.2832,
"step": 998
},
{
"epoch": 1.8992395437262357,
"grad_norm": 0.6526735540925764,
"learning_rate": 1.544961831017855e-07,
"loss": 0.275,
"step": 999
},
{
"epoch": 1.9011406844106464,
"grad_norm": 0.6987579184398464,
"learning_rate": 1.487354983783673e-07,
"loss": 0.2865,
"step": 1000
},
{
"epoch": 1.9030418250950571,
"grad_norm": 0.664610075015669,
"learning_rate": 1.430834584820895e-07,
"loss": 0.2766,
"step": 1001
},
{
"epoch": 1.9049429657794676,
"grad_norm": 0.7103392115334612,
"learning_rate": 1.375401257465625e-07,
"loss": 0.292,
"step": 1002
},
{
"epoch": 1.9068441064638784,
"grad_norm": 0.6839262704960665,
"learning_rate": 1.3210556130652031e-07,
"loss": 0.2923,
"step": 1003
},
{
"epoch": 1.908745247148289,
"grad_norm": 0.6820865292248977,
"learning_rate": 1.2677982509714415e-07,
"loss": 0.2753,
"step": 1004
},
{
"epoch": 1.9106463878326996,
"grad_norm": 0.6870856486722848,
"learning_rate": 1.2156297585339872e-07,
"loss": 0.2717,
"step": 1005
},
{
"epoch": 1.9125475285171103,
"grad_norm": 0.7037984695137841,
"learning_rate": 1.1645507110938925e-07,
"loss": 0.2811,
"step": 1006
},
{
"epoch": 1.914448669201521,
"grad_norm": 0.6783227710712997,
"learning_rate": 1.1145616719772545e-07,
"loss": 0.2833,
"step": 1007
},
{
"epoch": 1.9163498098859315,
"grad_norm": 0.6744709919577592,
"learning_rate": 1.0656631924889749e-07,
"loss": 0.2851,
"step": 1008
},
{
"epoch": 1.9182509505703422,
"grad_norm": 0.7191365540749947,
"learning_rate": 1.0178558119067316e-07,
"loss": 0.2913,
"step": 1009
},
{
"epoch": 1.920152091254753,
"grad_norm": 0.6877660938875847,
"learning_rate": 9.711400574749507e-08,
"loss": 0.2712,
"step": 1010
},
{
"epoch": 1.9220532319391634,
"grad_norm": 0.679595900300687,
"learning_rate": 9.255164443990994e-08,
"loss": 0.2816,
"step": 1011
},
{
"epoch": 1.9239543726235742,
"grad_norm": 0.7165912030234379,
"learning_rate": 8.809854758399017e-08,
"loss": 0.2919,
"step": 1012
},
{
"epoch": 1.9258555133079849,
"grad_norm": 0.6606189401638207,
"learning_rate": 8.375476429078543e-08,
"loss": 0.2728,
"step": 1013
},
{
"epoch": 1.9277566539923954,
"grad_norm": 0.7159922230897539,
"learning_rate": 7.952034246577977e-08,
"loss": 0.2906,
"step": 1014
},
{
"epoch": 1.929657794676806,
"grad_norm": 0.7032567205728644,
"learning_rate": 7.539532880836087e-08,
"loss": 0.2801,
"step": 1015
},
{
"epoch": 1.9315589353612168,
"grad_norm": 0.6290466716655878,
"learning_rate": 7.137976881130826e-08,
"loss": 0.2638,
"step": 1016
},
{
"epoch": 1.9334600760456273,
"grad_norm": 0.7190870237929656,
"learning_rate": 6.747370676028819e-08,
"loss": 0.2932,
"step": 1017
},
{
"epoch": 1.935361216730038,
"grad_norm": 0.720278712790371,
"learning_rate": 6.367718573336845e-08,
"loss": 0.2813,
"step": 1018
},
{
"epoch": 1.9372623574144487,
"grad_norm": 0.7352066727119855,
"learning_rate": 5.999024760054095e-08,
"loss": 0.2837,
"step": 1019
},
{
"epoch": 1.9391634980988592,
"grad_norm": 0.6819402606759842,
"learning_rate": 5.641293302326323e-08,
"loss": 0.2831,
"step": 1020
},
{
"epoch": 1.94106463878327,
"grad_norm": 0.673159999184549,
"learning_rate": 5.2945281454003236e-08,
"loss": 0.2878,
"step": 1021
},
{
"epoch": 1.9429657794676807,
"grad_norm": 0.665205222409615,
"learning_rate": 4.958733113581415e-08,
"loss": 0.2901,
"step": 1022
},
{
"epoch": 1.9448669201520912,
"grad_norm": 0.7055359823135217,
"learning_rate": 4.6339119101902475e-08,
"loss": 0.2801,
"step": 1023
},
{
"epoch": 1.946768060836502,
"grad_norm": 0.6900636341522559,
"learning_rate": 4.320068117522835e-08,
"loss": 0.2839,
"step": 1024
},
{
"epoch": 1.9486692015209126,
"grad_norm": 0.6873396940235628,
"learning_rate": 4.0172051968101474e-08,
"loss": 0.2932,
"step": 1025
},
{
"epoch": 1.950570342205323,
"grad_norm": 0.6430944310016597,
"learning_rate": 3.7253264881809137e-08,
"loss": 0.2742,
"step": 1026
},
{
"epoch": 1.9524714828897338,
"grad_norm": 0.639006015900848,
"learning_rate": 3.4444352106242086e-08,
"loss": 0.277,
"step": 1027
},
{
"epoch": 1.9543726235741445,
"grad_norm": 0.6662868221427745,
"learning_rate": 3.174534461953593e-08,
"loss": 0.2774,
"step": 1028
},
{
"epoch": 1.956273764258555,
"grad_norm": 0.6800069492449898,
"learning_rate": 2.915627218774142e-08,
"loss": 0.286,
"step": 1029
},
{
"epoch": 1.9581749049429658,
"grad_norm": 0.7005335136420766,
"learning_rate": 2.667716336448356e-08,
"loss": 0.2893,
"step": 1030
},
{
"epoch": 1.9600760456273765,
"grad_norm": 0.6546772043328244,
"learning_rate": 2.430804549065302e-08,
"loss": 0.2931,
"step": 1031
},
{
"epoch": 1.961977186311787,
"grad_norm": 0.6742583335313866,
"learning_rate": 2.2048944694104123e-08,
"loss": 0.2774,
"step": 1032
},
{
"epoch": 1.9638783269961977,
"grad_norm": 0.6906196576891838,
"learning_rate": 1.989988588936509e-08,
"loss": 0.2934,
"step": 1033
},
{
"epoch": 1.9657794676806084,
"grad_norm": 0.6795979199272293,
"learning_rate": 1.7860892777367133e-08,
"loss": 0.2849,
"step": 1034
},
{
"epoch": 1.967680608365019,
"grad_norm": 0.6516203819092081,
"learning_rate": 1.5931987845176912e-08,
"loss": 0.2694,
"step": 1035
},
{
"epoch": 1.9695817490494296,
"grad_norm": 0.6572368746030832,
"learning_rate": 1.411319236575337e-08,
"loss": 0.2846,
"step": 1036
},
{
"epoch": 1.9714828897338403,
"grad_norm": 0.7189574006051913,
"learning_rate": 1.2404526397711281e-08,
"loss": 0.2969,
"step": 1037
},
{
"epoch": 1.9733840304182508,
"grad_norm": 0.6446349628294267,
"learning_rate": 1.0806008785100297e-08,
"loss": 0.2679,
"step": 1038
},
{
"epoch": 1.9752851711026616,
"grad_norm": 0.7162715195981485,
"learning_rate": 9.317657157197347e-09,
"loss": 0.2997,
"step": 1039
},
{
"epoch": 1.9771863117870723,
"grad_norm": 0.7317928733213358,
"learning_rate": 7.93948792831234e-09,
"loss": 0.289,
"step": 1040
},
{
"epoch": 1.9790874524714828,
"grad_norm": 0.6634135146423511,
"learning_rate": 6.671516297606095e-09,
"loss": 0.2875,
"step": 1041
},
{
"epoch": 1.9809885931558935,
"grad_norm": 0.6935820806809422,
"learning_rate": 5.513756248924917e-09,
"loss": 0.2878,
"step": 1042
},
{
"epoch": 1.9828897338403042,
"grad_norm": 0.6837768778383532,
"learning_rate": 4.466220550641831e-09,
"loss": 0.2831,
"step": 1043
},
{
"epoch": 1.9847908745247147,
"grad_norm": 0.682184213816015,
"learning_rate": 3.528920755523357e-09,
"loss": 0.2843,
"step": 1044
},
{
"epoch": 1.9866920152091256,
"grad_norm": 0.7069434143922075,
"learning_rate": 2.701867200592956e-09,
"loss": 0.2825,
"step": 1045
},
{
"epoch": 1.9885931558935361,
"grad_norm": 0.6595307976803098,
"learning_rate": 1.9850690070266633e-09,
"loss": 0.2831,
"step": 1046
},
{
"epoch": 1.9904942965779466,
"grad_norm": 0.6941951826345345,
"learning_rate": 1.378534080042071e-09,
"loss": 0.2877,
"step": 1047
},
{
"epoch": 1.9923954372623576,
"grad_norm": 0.6505901119062624,
"learning_rate": 8.822691088195001e-10,
"loss": 0.2719,
"step": 1048
},
{
"epoch": 1.994296577946768,
"grad_norm": 0.6644799417007563,
"learning_rate": 4.962795664265052e-10,
"loss": 0.2656,
"step": 1049
},
{
"epoch": 1.9961977186311786,
"grad_norm": 0.7028065925418686,
"learning_rate": 2.2056970975459223e-10,
"loss": 0.2779,
"step": 1050
},
{
"epoch": 1.9980988593155895,
"grad_norm": 0.6991960900720445,
"learning_rate": 5.514257947369928e-11,
"loss": 0.2967,
"step": 1051
},
{
"epoch": 2.0,
"grad_norm": 0.6414230119948425,
"learning_rate": 0.0,
"loss": 0.2795,
"step": 1052
},
{
"epoch": 2.0,
"step": 1052,
"total_flos": 106814534860800.0,
"train_loss": 0.14604618646799386,
"train_runtime": 1782.4574,
"train_samples_per_second": 75.545,
"train_steps_per_second": 0.59
}
],
"logging_steps": 1,
"max_steps": 1052,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 106814534860800.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}