AngOFA-SYN / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 24.999766967123723,
"eval_steps": 500,
"global_step": 335250,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.04,
"learning_rate": 4.992647278150634e-05,
"loss": 5.3605,
"step": 500
},
{
"epoch": 0.07,
"learning_rate": 4.985190156599553e-05,
"loss": 3.4845,
"step": 1000
},
{
"epoch": 0.11,
"learning_rate": 4.9777330350484714e-05,
"loss": 2.9795,
"step": 1500
},
{
"epoch": 0.15,
"learning_rate": 4.97027591349739e-05,
"loss": 2.6753,
"step": 2000
},
{
"epoch": 0.19,
"learning_rate": 4.9628187919463085e-05,
"loss": 2.485,
"step": 2500
},
{
"epoch": 0.22,
"learning_rate": 4.9553616703952274e-05,
"loss": 2.3458,
"step": 3000
},
{
"epoch": 0.26,
"learning_rate": 4.947904548844147e-05,
"loss": 2.2333,
"step": 3500
},
{
"epoch": 0.3,
"learning_rate": 4.940447427293065e-05,
"loss": 2.1338,
"step": 4000
},
{
"epoch": 0.34,
"learning_rate": 4.932990305741984e-05,
"loss": 2.0561,
"step": 4500
},
{
"epoch": 0.37,
"learning_rate": 4.925533184190902e-05,
"loss": 2.0012,
"step": 5000
},
{
"epoch": 0.41,
"learning_rate": 4.918076062639822e-05,
"loss": 1.9287,
"step": 5500
},
{
"epoch": 0.45,
"learning_rate": 4.91061894108874e-05,
"loss": 1.8906,
"step": 6000
},
{
"epoch": 0.48,
"learning_rate": 4.903161819537659e-05,
"loss": 1.8421,
"step": 6500
},
{
"epoch": 0.52,
"learning_rate": 4.895704697986577e-05,
"loss": 1.8052,
"step": 7000
},
{
"epoch": 0.56,
"learning_rate": 4.8882475764354966e-05,
"loss": 1.7832,
"step": 7500
},
{
"epoch": 0.6,
"learning_rate": 4.880790454884415e-05,
"loss": 1.7501,
"step": 8000
},
{
"epoch": 0.63,
"learning_rate": 4.8733333333333337e-05,
"loss": 1.7011,
"step": 8500
},
{
"epoch": 0.67,
"learning_rate": 4.865876211782252e-05,
"loss": 1.6742,
"step": 9000
},
{
"epoch": 0.71,
"learning_rate": 4.858419090231171e-05,
"loss": 1.6682,
"step": 9500
},
{
"epoch": 0.75,
"learning_rate": 4.8509619686800896e-05,
"loss": 1.6518,
"step": 10000
},
{
"epoch": 0.78,
"learning_rate": 4.8435048471290085e-05,
"loss": 1.6028,
"step": 10500
},
{
"epoch": 0.82,
"learning_rate": 4.8360477255779274e-05,
"loss": 1.5983,
"step": 11000
},
{
"epoch": 0.86,
"learning_rate": 4.8285906040268456e-05,
"loss": 1.5778,
"step": 11500
},
{
"epoch": 0.89,
"learning_rate": 4.821133482475765e-05,
"loss": 1.5532,
"step": 12000
},
{
"epoch": 0.93,
"learning_rate": 4.813691275167786e-05,
"loss": 1.5414,
"step": 12500
},
{
"epoch": 0.97,
"learning_rate": 4.8062341536167046e-05,
"loss": 1.5212,
"step": 13000
},
{
"epoch": 1.01,
"learning_rate": 4.798791946308725e-05,
"loss": 1.5105,
"step": 13500
},
{
"epoch": 1.04,
"learning_rate": 4.7913497390007464e-05,
"loss": 1.4837,
"step": 14000
},
{
"epoch": 1.08,
"learning_rate": 4.7838926174496646e-05,
"loss": 1.4661,
"step": 14500
},
{
"epoch": 1.12,
"learning_rate": 4.7764354958985834e-05,
"loss": 1.4747,
"step": 15000
},
{
"epoch": 1.16,
"learning_rate": 4.7689783743475016e-05,
"loss": 1.4458,
"step": 15500
},
{
"epoch": 1.19,
"learning_rate": 4.761521252796421e-05,
"loss": 1.4461,
"step": 16000
},
{
"epoch": 1.23,
"learning_rate": 4.7540641312453394e-05,
"loss": 1.4218,
"step": 16500
},
{
"epoch": 1.27,
"learning_rate": 4.746607009694258e-05,
"loss": 1.3968,
"step": 17000
},
{
"epoch": 1.3,
"learning_rate": 4.7391498881431765e-05,
"loss": 1.3959,
"step": 17500
},
{
"epoch": 1.34,
"learning_rate": 4.731692766592096e-05,
"loss": 1.3846,
"step": 18000
},
{
"epoch": 1.38,
"learning_rate": 4.724235645041014e-05,
"loss": 1.3882,
"step": 18500
},
{
"epoch": 1.42,
"learning_rate": 4.716778523489933e-05,
"loss": 1.3611,
"step": 19000
},
{
"epoch": 1.45,
"learning_rate": 4.709321401938852e-05,
"loss": 1.3609,
"step": 19500
},
{
"epoch": 1.49,
"learning_rate": 4.70186428038777e-05,
"loss": 1.3447,
"step": 20000
},
{
"epoch": 1.53,
"learning_rate": 4.69440715883669e-05,
"loss": 1.3426,
"step": 20500
},
{
"epoch": 1.57,
"learning_rate": 4.68696495152871e-05,
"loss": 1.3421,
"step": 21000
},
{
"epoch": 1.6,
"learning_rate": 4.679507829977629e-05,
"loss": 1.3309,
"step": 21500
},
{
"epoch": 1.64,
"learning_rate": 4.6720507084265474e-05,
"loss": 1.2993,
"step": 22000
},
{
"epoch": 1.68,
"learning_rate": 4.6646085011185686e-05,
"loss": 1.3166,
"step": 22500
},
{
"epoch": 1.72,
"learning_rate": 4.657151379567487e-05,
"loss": 1.3053,
"step": 23000
},
{
"epoch": 1.75,
"learning_rate": 4.649694258016406e-05,
"loss": 1.3013,
"step": 23500
},
{
"epoch": 1.79,
"learning_rate": 4.6422371364653246e-05,
"loss": 1.2956,
"step": 24000
},
{
"epoch": 1.83,
"learning_rate": 4.6347800149142434e-05,
"loss": 1.2732,
"step": 24500
},
{
"epoch": 1.86,
"learning_rate": 4.6273228933631616e-05,
"loss": 1.2753,
"step": 25000
},
{
"epoch": 1.9,
"learning_rate": 4.6198657718120805e-05,
"loss": 1.2672,
"step": 25500
},
{
"epoch": 1.94,
"learning_rate": 4.6124086502609994e-05,
"loss": 1.261,
"step": 26000
},
{
"epoch": 1.98,
"learning_rate": 4.604951528709918e-05,
"loss": 1.2585,
"step": 26500
},
{
"epoch": 2.01,
"learning_rate": 4.597509321401939e-05,
"loss": 1.2504,
"step": 27000
},
{
"epoch": 2.05,
"learning_rate": 4.590052199850858e-05,
"loss": 1.2506,
"step": 27500
},
{
"epoch": 2.09,
"learning_rate": 4.5825950782997766e-05,
"loss": 1.2363,
"step": 28000
},
{
"epoch": 2.13,
"learning_rate": 4.5751379567486955e-05,
"loss": 1.222,
"step": 28500
},
{
"epoch": 2.16,
"learning_rate": 4.5676808351976143e-05,
"loss": 1.2189,
"step": 29000
},
{
"epoch": 2.2,
"learning_rate": 4.560238627889635e-05,
"loss": 1.2014,
"step": 29500
},
{
"epoch": 2.24,
"learning_rate": 4.552781506338554e-05,
"loss": 1.2192,
"step": 30000
},
{
"epoch": 2.27,
"learning_rate": 4.545324384787472e-05,
"loss": 1.1989,
"step": 30500
},
{
"epoch": 2.31,
"learning_rate": 4.537867263236391e-05,
"loss": 1.1974,
"step": 31000
},
{
"epoch": 2.35,
"learning_rate": 4.53041014168531e-05,
"loss": 1.2056,
"step": 31500
},
{
"epoch": 2.39,
"learning_rate": 4.5229530201342286e-05,
"loss": 1.1954,
"step": 32000
},
{
"epoch": 2.42,
"learning_rate": 4.515510812826249e-05,
"loss": 1.1942,
"step": 32500
},
{
"epoch": 2.46,
"learning_rate": 4.508053691275168e-05,
"loss": 1.1946,
"step": 33000
},
{
"epoch": 2.5,
"learning_rate": 4.500596569724086e-05,
"loss": 1.174,
"step": 33500
},
{
"epoch": 2.54,
"learning_rate": 4.493139448173005e-05,
"loss": 1.1633,
"step": 34000
},
{
"epoch": 2.57,
"learning_rate": 4.485682326621924e-05,
"loss": 1.1667,
"step": 34500
},
{
"epoch": 2.61,
"learning_rate": 4.478240119313945e-05,
"loss": 1.1628,
"step": 35000
},
{
"epoch": 2.65,
"learning_rate": 4.470797912005966e-05,
"loss": 1.1526,
"step": 35500
},
{
"epoch": 2.68,
"learning_rate": 4.463340790454885e-05,
"loss": 1.1463,
"step": 36000
},
{
"epoch": 2.72,
"learning_rate": 4.4558836689038036e-05,
"loss": 1.1465,
"step": 36500
},
{
"epoch": 2.76,
"learning_rate": 4.448426547352722e-05,
"loss": 1.1445,
"step": 37000
},
{
"epoch": 2.8,
"learning_rate": 4.440984340044743e-05,
"loss": 1.1454,
"step": 37500
},
{
"epoch": 2.83,
"learning_rate": 4.433527218493662e-05,
"loss": 1.1362,
"step": 38000
},
{
"epoch": 2.87,
"learning_rate": 4.426070096942581e-05,
"loss": 1.1308,
"step": 38500
},
{
"epoch": 2.91,
"learning_rate": 4.418612975391499e-05,
"loss": 1.1211,
"step": 39000
},
{
"epoch": 2.95,
"learning_rate": 4.411155853840418e-05,
"loss": 1.1337,
"step": 39500
},
{
"epoch": 2.98,
"learning_rate": 4.403698732289336e-05,
"loss": 1.1272,
"step": 40000
},
{
"epoch": 3.02,
"learning_rate": 4.396256524981357e-05,
"loss": 1.1187,
"step": 40500
},
{
"epoch": 3.06,
"learning_rate": 4.388799403430276e-05,
"loss": 1.1082,
"step": 41000
},
{
"epoch": 3.09,
"learning_rate": 4.381342281879195e-05,
"loss": 1.1056,
"step": 41500
},
{
"epoch": 3.13,
"learning_rate": 4.373885160328113e-05,
"loss": 1.1042,
"step": 42000
},
{
"epoch": 3.17,
"learning_rate": 4.366428038777032e-05,
"loss": 1.1029,
"step": 42500
},
{
"epoch": 3.21,
"learning_rate": 4.358970917225951e-05,
"loss": 1.1082,
"step": 43000
},
{
"epoch": 3.24,
"learning_rate": 4.35151379567487e-05,
"loss": 1.1002,
"step": 43500
},
{
"epoch": 3.28,
"learning_rate": 4.344056674123789e-05,
"loss": 1.0933,
"step": 44000
},
{
"epoch": 3.32,
"learning_rate": 4.336599552572707e-05,
"loss": 1.0962,
"step": 44500
},
{
"epoch": 3.36,
"learning_rate": 4.329142431021626e-05,
"loss": 1.0848,
"step": 45000
},
{
"epoch": 3.39,
"learning_rate": 4.321685309470545e-05,
"loss": 1.0868,
"step": 45500
},
{
"epoch": 3.43,
"learning_rate": 4.3142281879194636e-05,
"loss": 1.0955,
"step": 46000
},
{
"epoch": 3.47,
"learning_rate": 4.306771066368382e-05,
"loss": 1.075,
"step": 46500
},
{
"epoch": 3.5,
"learning_rate": 4.299328859060403e-05,
"loss": 1.0769,
"step": 47000
},
{
"epoch": 3.54,
"learning_rate": 4.291871737509321e-05,
"loss": 1.0804,
"step": 47500
},
{
"epoch": 3.58,
"learning_rate": 4.284414615958241e-05,
"loss": 1.0595,
"step": 48000
},
{
"epoch": 3.62,
"learning_rate": 4.2769724086502613e-05,
"loss": 1.0628,
"step": 48500
},
{
"epoch": 3.65,
"learning_rate": 4.26951528709918e-05,
"loss": 1.0656,
"step": 49000
},
{
"epoch": 3.69,
"learning_rate": 4.2620581655480984e-05,
"loss": 1.0656,
"step": 49500
},
{
"epoch": 3.73,
"learning_rate": 4.254601043997017e-05,
"loss": 1.0523,
"step": 50000
},
{
"epoch": 3.77,
"learning_rate": 4.247143922445936e-05,
"loss": 1.0538,
"step": 50500
},
{
"epoch": 3.8,
"learning_rate": 4.239686800894855e-05,
"loss": 1.0577,
"step": 51000
},
{
"epoch": 3.84,
"learning_rate": 4.232229679343773e-05,
"loss": 1.0548,
"step": 51500
},
{
"epoch": 3.88,
"learning_rate": 4.2247874720357945e-05,
"loss": 1.0555,
"step": 52000
},
{
"epoch": 3.91,
"learning_rate": 4.2173303504847134e-05,
"loss": 1.0373,
"step": 52500
},
{
"epoch": 3.95,
"learning_rate": 4.2098732289336316e-05,
"loss": 1.0473,
"step": 53000
},
{
"epoch": 3.99,
"learning_rate": 4.2024161073825504e-05,
"loss": 1.0423,
"step": 53500
},
{
"epoch": 4.03,
"learning_rate": 4.194973900074572e-05,
"loss": 1.0484,
"step": 54000
},
{
"epoch": 4.06,
"learning_rate": 4.1875167785234906e-05,
"loss": 1.0332,
"step": 54500
},
{
"epoch": 4.1,
"learning_rate": 4.180059656972409e-05,
"loss": 1.0329,
"step": 55000
},
{
"epoch": 4.14,
"learning_rate": 4.1726025354213276e-05,
"loss": 1.0273,
"step": 55500
},
{
"epoch": 4.18,
"learning_rate": 4.165160328113348e-05,
"loss": 1.0288,
"step": 56000
},
{
"epoch": 4.21,
"learning_rate": 4.157703206562267e-05,
"loss": 1.0206,
"step": 56500
},
{
"epoch": 4.25,
"learning_rate": 4.150246085011186e-05,
"loss": 1.023,
"step": 57000
},
{
"epoch": 4.29,
"learning_rate": 4.142788963460105e-05,
"loss": 1.0277,
"step": 57500
},
{
"epoch": 4.33,
"learning_rate": 4.135331841909023e-05,
"loss": 1.0213,
"step": 58000
},
{
"epoch": 4.36,
"learning_rate": 4.127874720357942e-05,
"loss": 1.017,
"step": 58500
},
{
"epoch": 4.4,
"learning_rate": 4.120417598806861e-05,
"loss": 1.0195,
"step": 59000
},
{
"epoch": 4.44,
"learning_rate": 4.1129604772557797e-05,
"loss": 1.0132,
"step": 59500
},
{
"epoch": 4.47,
"learning_rate": 4.1055182699478e-05,
"loss": 1.0123,
"step": 60000
},
{
"epoch": 4.51,
"learning_rate": 4.098061148396719e-05,
"loss": 0.9978,
"step": 60500
},
{
"epoch": 4.55,
"learning_rate": 4.090604026845638e-05,
"loss": 1.0106,
"step": 61000
},
{
"epoch": 4.59,
"learning_rate": 4.083146905294556e-05,
"loss": 0.9945,
"step": 61500
},
{
"epoch": 4.62,
"learning_rate": 4.075689783743476e-05,
"loss": 1.0011,
"step": 62000
},
{
"epoch": 4.66,
"learning_rate": 4.068232662192394e-05,
"loss": 0.9933,
"step": 62500
},
{
"epoch": 4.7,
"learning_rate": 4.060790454884415e-05,
"loss": 1.0049,
"step": 63000
},
{
"epoch": 4.74,
"learning_rate": 4.053348247576436e-05,
"loss": 0.9991,
"step": 63500
},
{
"epoch": 4.77,
"learning_rate": 4.0458911260253546e-05,
"loss": 0.9994,
"step": 64000
},
{
"epoch": 4.81,
"learning_rate": 4.038434004474273e-05,
"loss": 0.9937,
"step": 64500
},
{
"epoch": 4.85,
"learning_rate": 4.030976882923192e-05,
"loss": 0.9941,
"step": 65000
},
{
"epoch": 4.88,
"learning_rate": 4.0235197613721106e-05,
"loss": 0.9905,
"step": 65500
},
{
"epoch": 4.92,
"learning_rate": 4.0160626398210294e-05,
"loss": 1.0044,
"step": 66000
},
{
"epoch": 4.96,
"learning_rate": 4.0086055182699476e-05,
"loss": 0.9899,
"step": 66500
},
{
"epoch": 5.0,
"learning_rate": 4.0011483967188665e-05,
"loss": 0.9891,
"step": 67000
},
{
"epoch": 5.03,
"learning_rate": 3.9936912751677854e-05,
"loss": 0.9818,
"step": 67500
},
{
"epoch": 5.07,
"learning_rate": 3.9862490678598066e-05,
"loss": 0.9858,
"step": 68000
},
{
"epoch": 5.11,
"learning_rate": 3.978791946308725e-05,
"loss": 0.9752,
"step": 68500
},
{
"epoch": 5.15,
"learning_rate": 3.971334824757644e-05,
"loss": 0.9735,
"step": 69000
},
{
"epoch": 5.18,
"learning_rate": 3.9638777032065626e-05,
"loss": 0.9742,
"step": 69500
},
{
"epoch": 5.22,
"learning_rate": 3.956420581655481e-05,
"loss": 0.9745,
"step": 70000
},
{
"epoch": 5.26,
"learning_rate": 3.9489634601044003e-05,
"loss": 0.9675,
"step": 70500
},
{
"epoch": 5.29,
"learning_rate": 3.9415063385533185e-05,
"loss": 0.9649,
"step": 71000
},
{
"epoch": 5.33,
"learning_rate": 3.93406413124534e-05,
"loss": 0.983,
"step": 71500
},
{
"epoch": 5.37,
"learning_rate": 3.926607009694258e-05,
"loss": 0.9622,
"step": 72000
},
{
"epoch": 5.41,
"learning_rate": 3.919149888143177e-05,
"loss": 0.9659,
"step": 72500
},
{
"epoch": 5.44,
"learning_rate": 3.911692766592096e-05,
"loss": 0.9717,
"step": 73000
},
{
"epoch": 5.48,
"learning_rate": 3.9042356450410146e-05,
"loss": 0.9503,
"step": 73500
},
{
"epoch": 5.52,
"learning_rate": 3.896793437733035e-05,
"loss": 0.9699,
"step": 74000
},
{
"epoch": 5.56,
"learning_rate": 3.889336316181954e-05,
"loss": 0.962,
"step": 74500
},
{
"epoch": 5.59,
"learning_rate": 3.881879194630872e-05,
"loss": 0.9561,
"step": 75000
},
{
"epoch": 5.63,
"learning_rate": 3.874422073079791e-05,
"loss": 0.9587,
"step": 75500
},
{
"epoch": 5.67,
"learning_rate": 3.86696495152871e-05,
"loss": 0.9576,
"step": 76000
},
{
"epoch": 5.7,
"learning_rate": 3.859507829977629e-05,
"loss": 0.9509,
"step": 76500
},
{
"epoch": 5.74,
"learning_rate": 3.8520656226696495e-05,
"loss": 0.9565,
"step": 77000
},
{
"epoch": 5.78,
"learning_rate": 3.844608501118568e-05,
"loss": 0.9555,
"step": 77500
},
{
"epoch": 5.82,
"learning_rate": 3.837151379567487e-05,
"loss": 0.9573,
"step": 78000
},
{
"epoch": 5.85,
"learning_rate": 3.829694258016406e-05,
"loss": 0.9459,
"step": 78500
},
{
"epoch": 5.89,
"learning_rate": 3.822237136465325e-05,
"loss": 0.9394,
"step": 79000
},
{
"epoch": 5.93,
"learning_rate": 3.814780014914243e-05,
"loss": 0.9417,
"step": 79500
},
{
"epoch": 5.97,
"learning_rate": 3.8073378076062644e-05,
"loss": 0.9459,
"step": 80000
},
{
"epoch": 6.0,
"learning_rate": 3.7998806860551826e-05,
"loss": 0.9329,
"step": 80500
},
{
"epoch": 6.04,
"learning_rate": 3.7924235645041015e-05,
"loss": 0.9297,
"step": 81000
},
{
"epoch": 6.08,
"learning_rate": 3.7849664429530204e-05,
"loss": 0.9379,
"step": 81500
},
{
"epoch": 6.11,
"learning_rate": 3.777509321401939e-05,
"loss": 0.9363,
"step": 82000
},
{
"epoch": 6.15,
"learning_rate": 3.77006711409396e-05,
"loss": 0.9273,
"step": 82500
},
{
"epoch": 6.19,
"learning_rate": 3.762609992542879e-05,
"loss": 0.9367,
"step": 83000
},
{
"epoch": 6.23,
"learning_rate": 3.755152870991797e-05,
"loss": 0.9395,
"step": 83500
},
{
"epoch": 6.26,
"learning_rate": 3.7476957494407164e-05,
"loss": 0.9302,
"step": 84000
},
{
"epoch": 6.3,
"learning_rate": 3.7402386278896346e-05,
"loss": 0.9259,
"step": 84500
},
{
"epoch": 6.34,
"learning_rate": 3.732796420581656e-05,
"loss": 0.9315,
"step": 85000
},
{
"epoch": 6.38,
"learning_rate": 3.725339299030574e-05,
"loss": 0.9245,
"step": 85500
},
{
"epoch": 6.41,
"learning_rate": 3.717882177479493e-05,
"loss": 0.9289,
"step": 86000
},
{
"epoch": 6.45,
"learning_rate": 3.710425055928412e-05,
"loss": 0.9199,
"step": 86500
},
{
"epoch": 6.49,
"learning_rate": 3.702967934377331e-05,
"loss": 0.9177,
"step": 87000
},
{
"epoch": 6.52,
"learning_rate": 3.6955108128262496e-05,
"loss": 0.9263,
"step": 87500
},
{
"epoch": 6.56,
"learning_rate": 3.68806860551827e-05,
"loss": 0.9239,
"step": 88000
},
{
"epoch": 6.6,
"learning_rate": 3.680611483967189e-05,
"loss": 0.924,
"step": 88500
},
{
"epoch": 6.64,
"learning_rate": 3.673154362416107e-05,
"loss": 0.918,
"step": 89000
},
{
"epoch": 6.67,
"learning_rate": 3.665697240865026e-05,
"loss": 0.9203,
"step": 89500
},
{
"epoch": 6.71,
"learning_rate": 3.658269947800149e-05,
"loss": 0.9224,
"step": 90000
},
{
"epoch": 6.75,
"learning_rate": 3.650812826249068e-05,
"loss": 0.9149,
"step": 90500
},
{
"epoch": 6.79,
"learning_rate": 3.643355704697987e-05,
"loss": 0.9134,
"step": 91000
},
{
"epoch": 6.82,
"learning_rate": 3.635898583146906e-05,
"loss": 0.9185,
"step": 91500
},
{
"epoch": 6.86,
"learning_rate": 3.628441461595824e-05,
"loss": 0.9132,
"step": 92000
},
{
"epoch": 6.9,
"learning_rate": 3.620984340044743e-05,
"loss": 0.9159,
"step": 92500
},
{
"epoch": 6.94,
"learning_rate": 3.6135272184936616e-05,
"loss": 0.9081,
"step": 93000
},
{
"epoch": 6.97,
"learning_rate": 3.6060700969425805e-05,
"loss": 0.9047,
"step": 93500
},
{
"epoch": 7.01,
"learning_rate": 3.5986129753914994e-05,
"loss": 0.9107,
"step": 94000
},
{
"epoch": 7.05,
"learning_rate": 3.5911558538404176e-05,
"loss": 0.9004,
"step": 94500
},
{
"epoch": 7.08,
"learning_rate": 3.5836987322893364e-05,
"loss": 0.8956,
"step": 95000
},
{
"epoch": 7.12,
"learning_rate": 3.576256524981357e-05,
"loss": 0.9009,
"step": 95500
},
{
"epoch": 7.16,
"learning_rate": 3.5687994034302766e-05,
"loss": 0.9023,
"step": 96000
},
{
"epoch": 7.2,
"learning_rate": 3.561342281879195e-05,
"loss": 0.8972,
"step": 96500
},
{
"epoch": 7.23,
"learning_rate": 3.5538851603281136e-05,
"loss": 0.8967,
"step": 97000
},
{
"epoch": 7.27,
"learning_rate": 3.546428038777032e-05,
"loss": 0.8867,
"step": 97500
},
{
"epoch": 7.31,
"learning_rate": 3.538985831469053e-05,
"loss": 0.9029,
"step": 98000
},
{
"epoch": 7.35,
"learning_rate": 3.5315436241610737e-05,
"loss": 0.901,
"step": 98500
},
{
"epoch": 7.38,
"learning_rate": 3.5240865026099925e-05,
"loss": 0.889,
"step": 99000
},
{
"epoch": 7.42,
"learning_rate": 3.5166293810589114e-05,
"loss": 0.8931,
"step": 99500
},
{
"epoch": 7.46,
"learning_rate": 3.50917225950783e-05,
"loss": 0.892,
"step": 100000
},
{
"epoch": 7.49,
"learning_rate": 3.5017151379567485e-05,
"loss": 0.8955,
"step": 100500
},
{
"epoch": 7.53,
"learning_rate": 3.4942580164056674e-05,
"loss": 0.891,
"step": 101000
},
{
"epoch": 7.57,
"learning_rate": 3.486800894854586e-05,
"loss": 0.8959,
"step": 101500
},
{
"epoch": 7.61,
"learning_rate": 3.4793586875466075e-05,
"loss": 0.8927,
"step": 102000
},
{
"epoch": 7.64,
"learning_rate": 3.471901565995526e-05,
"loss": 0.886,
"step": 102500
},
{
"epoch": 7.68,
"learning_rate": 3.4644444444444446e-05,
"loss": 0.8811,
"step": 103000
},
{
"epoch": 7.72,
"learning_rate": 3.4569873228933634e-05,
"loss": 0.8788,
"step": 103500
},
{
"epoch": 7.76,
"learning_rate": 3.449530201342282e-05,
"loss": 0.8892,
"step": 104000
},
{
"epoch": 7.79,
"learning_rate": 3.442073079791201e-05,
"loss": 0.8826,
"step": 104500
},
{
"epoch": 7.83,
"learning_rate": 3.4346159582401194e-05,
"loss": 0.8872,
"step": 105000
},
{
"epoch": 7.87,
"learning_rate": 3.427158836689038e-05,
"loss": 0.8826,
"step": 105500
},
{
"epoch": 7.9,
"learning_rate": 3.4197017151379565e-05,
"loss": 0.8824,
"step": 106000
},
{
"epoch": 7.94,
"learning_rate": 3.412244593586876e-05,
"loss": 0.8805,
"step": 106500
},
{
"epoch": 7.98,
"learning_rate": 3.404787472035794e-05,
"loss": 0.8835,
"step": 107000
},
{
"epoch": 8.02,
"learning_rate": 3.397330350484713e-05,
"loss": 0.8835,
"step": 107500
},
{
"epoch": 8.05,
"learning_rate": 3.3898881431767337e-05,
"loss": 0.877,
"step": 108000
},
{
"epoch": 8.09,
"learning_rate": 3.3824310216256525e-05,
"loss": 0.8645,
"step": 108500
},
{
"epoch": 8.13,
"learning_rate": 3.3749739000745714e-05,
"loss": 0.8742,
"step": 109000
},
{
"epoch": 8.17,
"learning_rate": 3.36751677852349e-05,
"loss": 0.8655,
"step": 109500
},
{
"epoch": 8.2,
"learning_rate": 3.3600596569724085e-05,
"loss": 0.8711,
"step": 110000
},
{
"epoch": 8.24,
"learning_rate": 3.352632363907532e-05,
"loss": 0.8715,
"step": 110500
},
{
"epoch": 8.28,
"learning_rate": 3.34517524235645e-05,
"loss": 0.8717,
"step": 111000
},
{
"epoch": 8.31,
"learning_rate": 3.337718120805369e-05,
"loss": 0.8632,
"step": 111500
},
{
"epoch": 8.35,
"learning_rate": 3.330260999254288e-05,
"loss": 0.8666,
"step": 112000
},
{
"epoch": 8.39,
"learning_rate": 3.3228187919463086e-05,
"loss": 0.8669,
"step": 112500
},
{
"epoch": 8.43,
"learning_rate": 3.3153616703952275e-05,
"loss": 0.8653,
"step": 113000
},
{
"epoch": 8.46,
"learning_rate": 3.3079045488441464e-05,
"loss": 0.8602,
"step": 113500
},
{
"epoch": 8.5,
"learning_rate": 3.300447427293065e-05,
"loss": 0.8666,
"step": 114000
},
{
"epoch": 8.54,
"learning_rate": 3.2929903057419834e-05,
"loss": 0.8565,
"step": 114500
},
{
"epoch": 8.58,
"learning_rate": 3.285533184190902e-05,
"loss": 0.8628,
"step": 115000
},
{
"epoch": 8.61,
"learning_rate": 3.278076062639821e-05,
"loss": 0.8666,
"step": 115500
},
{
"epoch": 8.65,
"learning_rate": 3.27061894108874e-05,
"loss": 0.8649,
"step": 116000
},
{
"epoch": 8.69,
"learning_rate": 3.2631767337807606e-05,
"loss": 0.8605,
"step": 116500
},
{
"epoch": 8.72,
"learning_rate": 3.2557196122296795e-05,
"loss": 0.8594,
"step": 117000
},
{
"epoch": 8.76,
"learning_rate": 3.248262490678598e-05,
"loss": 0.8624,
"step": 117500
},
{
"epoch": 8.8,
"learning_rate": 3.240805369127517e-05,
"loss": 0.8604,
"step": 118000
},
{
"epoch": 8.84,
"learning_rate": 3.2333482475764355e-05,
"loss": 0.8592,
"step": 118500
},
{
"epoch": 8.87,
"learning_rate": 3.225906040268457e-05,
"loss": 0.8643,
"step": 119000
},
{
"epoch": 8.91,
"learning_rate": 3.218448918717375e-05,
"loss": 0.8638,
"step": 119500
},
{
"epoch": 8.95,
"learning_rate": 3.210991797166294e-05,
"loss": 0.8563,
"step": 120000
},
{
"epoch": 8.99,
"learning_rate": 3.203534675615213e-05,
"loss": 0.8497,
"step": 120500
},
{
"epoch": 9.02,
"learning_rate": 3.1960775540641315e-05,
"loss": 0.8588,
"step": 121000
},
{
"epoch": 9.06,
"learning_rate": 3.188635346756153e-05,
"loss": 0.8427,
"step": 121500
},
{
"epoch": 9.1,
"learning_rate": 3.181178225205071e-05,
"loss": 0.8571,
"step": 122000
},
{
"epoch": 9.13,
"learning_rate": 3.17372110365399e-05,
"loss": 0.8445,
"step": 122500
},
{
"epoch": 9.17,
"learning_rate": 3.166263982102908e-05,
"loss": 0.8491,
"step": 123000
},
{
"epoch": 9.21,
"learning_rate": 3.1588068605518276e-05,
"loss": 0.8394,
"step": 123500
},
{
"epoch": 9.25,
"learning_rate": 3.151364653243848e-05,
"loss": 0.8485,
"step": 124000
},
{
"epoch": 9.28,
"learning_rate": 3.143907531692767e-05,
"loss": 0.8426,
"step": 124500
},
{
"epoch": 9.32,
"learning_rate": 3.136450410141685e-05,
"loss": 0.847,
"step": 125000
},
{
"epoch": 9.36,
"learning_rate": 3.128993288590604e-05,
"loss": 0.8453,
"step": 125500
},
{
"epoch": 9.4,
"learning_rate": 3.121536167039523e-05,
"loss": 0.8464,
"step": 126000
},
{
"epoch": 9.43,
"learning_rate": 3.114079045488442e-05,
"loss": 0.8477,
"step": 126500
},
{
"epoch": 9.47,
"learning_rate": 3.1066368381804625e-05,
"loss": 0.84,
"step": 127000
},
{
"epoch": 9.51,
"learning_rate": 3.099179716629381e-05,
"loss": 0.8442,
"step": 127500
},
{
"epoch": 9.55,
"learning_rate": 3.091737509321402e-05,
"loss": 0.8431,
"step": 128000
},
{
"epoch": 9.58,
"learning_rate": 3.084280387770321e-05,
"loss": 0.842,
"step": 128500
},
{
"epoch": 9.62,
"learning_rate": 3.0768232662192396e-05,
"loss": 0.8428,
"step": 129000
},
{
"epoch": 9.66,
"learning_rate": 3.0693661446681585e-05,
"loss": 0.8397,
"step": 129500
},
{
"epoch": 9.69,
"learning_rate": 3.0619090231170774e-05,
"loss": 0.8439,
"step": 130000
},
{
"epoch": 9.73,
"learning_rate": 3.0544519015659956e-05,
"loss": 0.8376,
"step": 130500
},
{
"epoch": 9.77,
"learning_rate": 3.047009694258017e-05,
"loss": 0.835,
"step": 131000
},
{
"epoch": 9.81,
"learning_rate": 3.039552572706935e-05,
"loss": 0.8321,
"step": 131500
},
{
"epoch": 9.84,
"learning_rate": 3.0320954511558543e-05,
"loss": 0.8346,
"step": 132000
},
{
"epoch": 9.88,
"learning_rate": 3.0246383296047725e-05,
"loss": 0.8362,
"step": 132500
},
{
"epoch": 9.92,
"learning_rate": 3.0171961222967937e-05,
"loss": 0.8297,
"step": 133000
},
{
"epoch": 9.96,
"learning_rate": 3.0097539149888143e-05,
"loss": 0.841,
"step": 133500
},
{
"epoch": 9.99,
"learning_rate": 3.002296793437733e-05,
"loss": 0.8359,
"step": 134000
},
{
"epoch": 10.03,
"learning_rate": 2.9948396718866517e-05,
"loss": 0.8275,
"step": 134500
},
{
"epoch": 10.07,
"learning_rate": 2.9873825503355706e-05,
"loss": 0.8226,
"step": 135000
},
{
"epoch": 10.1,
"learning_rate": 2.979925428784489e-05,
"loss": 0.8322,
"step": 135500
},
{
"epoch": 10.14,
"learning_rate": 2.972468307233408e-05,
"loss": 0.8213,
"step": 136000
},
{
"epoch": 10.18,
"learning_rate": 2.9650111856823265e-05,
"loss": 0.8237,
"step": 136500
},
{
"epoch": 10.22,
"learning_rate": 2.9575540641312454e-05,
"loss": 0.8273,
"step": 137000
},
{
"epoch": 10.25,
"learning_rate": 2.9500969425801646e-05,
"loss": 0.8243,
"step": 137500
},
{
"epoch": 10.29,
"learning_rate": 2.9426398210290828e-05,
"loss": 0.82,
"step": 138000
},
{
"epoch": 10.33,
"learning_rate": 2.9351826994780017e-05,
"loss": 0.8232,
"step": 138500
},
{
"epoch": 10.37,
"learning_rate": 2.9277255779269202e-05,
"loss": 0.8242,
"step": 139000
},
{
"epoch": 10.4,
"learning_rate": 2.9202833706189415e-05,
"loss": 0.8229,
"step": 139500
},
{
"epoch": 10.44,
"learning_rate": 2.91282624906786e-05,
"loss": 0.8172,
"step": 140000
},
{
"epoch": 10.48,
"learning_rate": 2.905369127516779e-05,
"loss": 0.823,
"step": 140500
},
{
"epoch": 10.51,
"learning_rate": 2.897912005965697e-05,
"loss": 0.8224,
"step": 141000
},
{
"epoch": 10.55,
"learning_rate": 2.8904548844146163e-05,
"loss": 0.8265,
"step": 141500
},
{
"epoch": 10.59,
"learning_rate": 2.8829977628635345e-05,
"loss": 0.8191,
"step": 142000
},
{
"epoch": 10.63,
"learning_rate": 2.8755406413124537e-05,
"loss": 0.8236,
"step": 142500
},
{
"epoch": 10.66,
"learning_rate": 2.868083519761372e-05,
"loss": 0.8158,
"step": 143000
},
{
"epoch": 10.7,
"learning_rate": 2.860626398210291e-05,
"loss": 0.8221,
"step": 143500
},
{
"epoch": 10.74,
"learning_rate": 2.8531841909023117e-05,
"loss": 0.8212,
"step": 144000
},
{
"epoch": 10.78,
"learning_rate": 2.8457419835943326e-05,
"loss": 0.825,
"step": 144500
},
{
"epoch": 10.81,
"learning_rate": 2.838284862043251e-05,
"loss": 0.8255,
"step": 145000
},
{
"epoch": 10.85,
"learning_rate": 2.83082774049217e-05,
"loss": 0.8139,
"step": 145500
},
{
"epoch": 10.89,
"learning_rate": 2.8233706189410892e-05,
"loss": 0.8157,
"step": 146000
},
{
"epoch": 10.92,
"learning_rate": 2.8159134973900074e-05,
"loss": 0.8177,
"step": 146500
},
{
"epoch": 10.96,
"learning_rate": 2.8084563758389266e-05,
"loss": 0.8097,
"step": 147000
},
{
"epoch": 11.0,
"learning_rate": 2.8009992542878448e-05,
"loss": 0.8206,
"step": 147500
},
{
"epoch": 11.04,
"learning_rate": 2.793557046979866e-05,
"loss": 0.8084,
"step": 148000
},
{
"epoch": 11.07,
"learning_rate": 2.7860999254287846e-05,
"loss": 0.8082,
"step": 148500
},
{
"epoch": 11.11,
"learning_rate": 2.7786428038777035e-05,
"loss": 0.8034,
"step": 149000
},
{
"epoch": 11.15,
"learning_rate": 2.771185682326622e-05,
"loss": 0.8013,
"step": 149500
},
{
"epoch": 11.19,
"learning_rate": 2.763728560775541e-05,
"loss": 0.8098,
"step": 150000
},
{
"epoch": 11.22,
"learning_rate": 2.7562714392244594e-05,
"loss": 0.8062,
"step": 150500
},
{
"epoch": 11.26,
"learning_rate": 2.7488143176733783e-05,
"loss": 0.8112,
"step": 151000
},
{
"epoch": 11.3,
"learning_rate": 2.741372110365399e-05,
"loss": 0.8068,
"step": 151500
},
{
"epoch": 11.33,
"learning_rate": 2.7339149888143178e-05,
"loss": 0.8069,
"step": 152000
},
{
"epoch": 11.37,
"learning_rate": 2.7264578672632363e-05,
"loss": 0.8089,
"step": 152500
},
{
"epoch": 11.41,
"learning_rate": 2.7190156599552575e-05,
"loss": 0.8094,
"step": 153000
},
{
"epoch": 11.45,
"learning_rate": 2.7115585384041757e-05,
"loss": 0.8012,
"step": 153500
},
{
"epoch": 11.48,
"learning_rate": 2.704101416853095e-05,
"loss": 0.8105,
"step": 154000
},
{
"epoch": 11.52,
"learning_rate": 2.696644295302014e-05,
"loss": 0.8066,
"step": 154500
},
{
"epoch": 11.56,
"learning_rate": 2.6891871737509324e-05,
"loss": 0.8094,
"step": 155000
},
{
"epoch": 11.6,
"learning_rate": 2.6817300521998512e-05,
"loss": 0.809,
"step": 155500
},
{
"epoch": 11.63,
"learning_rate": 2.6742729306487694e-05,
"loss": 0.7998,
"step": 156000
},
{
"epoch": 11.67,
"learning_rate": 2.6668158090976887e-05,
"loss": 0.8112,
"step": 156500
},
{
"epoch": 11.71,
"learning_rate": 2.6593736017897092e-05,
"loss": 0.8068,
"step": 157000
},
{
"epoch": 11.74,
"learning_rate": 2.651916480238628e-05,
"loss": 0.8042,
"step": 157500
},
{
"epoch": 11.78,
"learning_rate": 2.6444593586875466e-05,
"loss": 0.7972,
"step": 158000
},
{
"epoch": 11.82,
"learning_rate": 2.6370022371364655e-05,
"loss": 0.8119,
"step": 158500
},
{
"epoch": 11.86,
"learning_rate": 2.629545115585384e-05,
"loss": 0.7982,
"step": 159000
},
{
"epoch": 11.89,
"learning_rate": 2.622102908277405e-05,
"loss": 0.7973,
"step": 159500
},
{
"epoch": 11.93,
"learning_rate": 2.6146457867263235e-05,
"loss": 0.7919,
"step": 160000
},
{
"epoch": 11.97,
"learning_rate": 2.6071886651752424e-05,
"loss": 0.8013,
"step": 160500
},
{
"epoch": 12.01,
"learning_rate": 2.599731543624161e-05,
"loss": 0.7986,
"step": 161000
},
{
"epoch": 12.04,
"learning_rate": 2.592289336316182e-05,
"loss": 0.7927,
"step": 161500
},
{
"epoch": 12.08,
"learning_rate": 2.584832214765101e-05,
"loss": 0.7983,
"step": 162000
},
{
"epoch": 12.12,
"learning_rate": 2.5773750932140196e-05,
"loss": 0.7913,
"step": 162500
},
{
"epoch": 12.15,
"learning_rate": 2.5699179716629384e-05,
"loss": 0.7979,
"step": 163000
},
{
"epoch": 12.19,
"learning_rate": 2.562460850111857e-05,
"loss": 0.7941,
"step": 163500
},
{
"epoch": 12.23,
"learning_rate": 2.555003728560776e-05,
"loss": 0.7871,
"step": 164000
},
{
"epoch": 12.27,
"learning_rate": 2.5475466070096944e-05,
"loss": 0.7927,
"step": 164500
},
{
"epoch": 12.3,
"learning_rate": 2.5400894854586133e-05,
"loss": 0.7908,
"step": 165000
},
{
"epoch": 12.34,
"learning_rate": 2.532647278150634e-05,
"loss": 0.7872,
"step": 165500
},
{
"epoch": 12.38,
"learning_rate": 2.5251901565995527e-05,
"loss": 0.7883,
"step": 166000
},
{
"epoch": 12.42,
"learning_rate": 2.5177330350484713e-05,
"loss": 0.785,
"step": 166500
},
{
"epoch": 12.45,
"learning_rate": 2.51027591349739e-05,
"loss": 0.7894,
"step": 167000
},
{
"epoch": 12.49,
"learning_rate": 2.5028337061894107e-05,
"loss": 0.7822,
"step": 167500
},
{
"epoch": 12.53,
"learning_rate": 2.49537658463833e-05,
"loss": 0.7952,
"step": 168000
},
{
"epoch": 12.57,
"learning_rate": 2.4879194630872485e-05,
"loss": 0.787,
"step": 168500
},
{
"epoch": 12.6,
"learning_rate": 2.4804623415361673e-05,
"loss": 0.7909,
"step": 169000
},
{
"epoch": 12.64,
"learning_rate": 2.473005219985086e-05,
"loss": 0.7913,
"step": 169500
},
{
"epoch": 12.68,
"learning_rate": 2.4655480984340047e-05,
"loss": 0.7831,
"step": 170000
},
{
"epoch": 12.71,
"learning_rate": 2.4580909768829233e-05,
"loss": 0.7899,
"step": 170500
},
{
"epoch": 12.75,
"learning_rate": 2.4506338553318418e-05,
"loss": 0.79,
"step": 171000
},
{
"epoch": 12.79,
"learning_rate": 2.4431767337807607e-05,
"loss": 0.7897,
"step": 171500
},
{
"epoch": 12.83,
"learning_rate": 2.4357345264727816e-05,
"loss": 0.7813,
"step": 172000
},
{
"epoch": 12.86,
"learning_rate": 2.4282774049217e-05,
"loss": 0.7852,
"step": 172500
},
{
"epoch": 12.9,
"learning_rate": 2.420820283370619e-05,
"loss": 0.7856,
"step": 173000
},
{
"epoch": 12.94,
"learning_rate": 2.4133631618195376e-05,
"loss": 0.7859,
"step": 173500
},
{
"epoch": 12.98,
"learning_rate": 2.4059209545115585e-05,
"loss": 0.7854,
"step": 174000
},
{
"epoch": 13.01,
"learning_rate": 2.3984787472035794e-05,
"loss": 0.7768,
"step": 174500
},
{
"epoch": 13.05,
"learning_rate": 2.3910216256524982e-05,
"loss": 0.7776,
"step": 175000
},
{
"epoch": 13.09,
"learning_rate": 2.383564504101417e-05,
"loss": 0.7801,
"step": 175500
},
{
"epoch": 13.12,
"learning_rate": 2.3761073825503357e-05,
"loss": 0.7801,
"step": 176000
},
{
"epoch": 13.16,
"learning_rate": 2.3686502609992545e-05,
"loss": 0.7767,
"step": 176500
},
{
"epoch": 13.2,
"learning_rate": 2.3612080536912754e-05,
"loss": 0.7765,
"step": 177000
},
{
"epoch": 13.24,
"learning_rate": 2.353750932140194e-05,
"loss": 0.7761,
"step": 177500
},
{
"epoch": 13.27,
"learning_rate": 2.346293810589113e-05,
"loss": 0.7767,
"step": 178000
},
{
"epoch": 13.31,
"learning_rate": 2.3388366890380314e-05,
"loss": 0.7699,
"step": 178500
},
{
"epoch": 13.35,
"learning_rate": 2.3313795674869503e-05,
"loss": 0.7793,
"step": 179000
},
{
"epoch": 13.39,
"learning_rate": 2.3239373601789712e-05,
"loss": 0.7755,
"step": 179500
},
{
"epoch": 13.42,
"learning_rate": 2.3164802386278897e-05,
"loss": 0.767,
"step": 180000
},
{
"epoch": 13.46,
"learning_rate": 2.3090231170768082e-05,
"loss": 0.7779,
"step": 180500
},
{
"epoch": 13.5,
"learning_rate": 2.301565995525727e-05,
"loss": 0.7851,
"step": 181000
},
{
"epoch": 13.53,
"learning_rate": 2.2941088739746457e-05,
"loss": 0.7793,
"step": 181500
},
{
"epoch": 13.57,
"learning_rate": 2.2866517524235645e-05,
"loss": 0.7716,
"step": 182000
},
{
"epoch": 13.61,
"learning_rate": 2.279194630872483e-05,
"loss": 0.7818,
"step": 182500
},
{
"epoch": 13.65,
"learning_rate": 2.2717375093214023e-05,
"loss": 0.7779,
"step": 183000
},
{
"epoch": 13.68,
"learning_rate": 2.2642953020134232e-05,
"loss": 0.7739,
"step": 183500
},
{
"epoch": 13.72,
"learning_rate": 2.2568381804623417e-05,
"loss": 0.766,
"step": 184000
},
{
"epoch": 13.76,
"learning_rate": 2.2493810589112603e-05,
"loss": 0.7742,
"step": 184500
},
{
"epoch": 13.8,
"learning_rate": 2.2419388516032812e-05,
"loss": 0.767,
"step": 185000
},
{
"epoch": 13.83,
"learning_rate": 2.2344817300522e-05,
"loss": 0.7672,
"step": 185500
},
{
"epoch": 13.87,
"learning_rate": 2.2270246085011186e-05,
"loss": 0.7774,
"step": 186000
},
{
"epoch": 13.91,
"learning_rate": 2.2195674869500375e-05,
"loss": 0.7696,
"step": 186500
},
{
"epoch": 13.94,
"learning_rate": 2.2121252796420584e-05,
"loss": 0.7736,
"step": 187000
},
{
"epoch": 13.98,
"learning_rate": 2.204668158090977e-05,
"loss": 0.7705,
"step": 187500
},
{
"epoch": 14.02,
"learning_rate": 2.1972110365398958e-05,
"loss": 0.7768,
"step": 188000
},
{
"epoch": 14.06,
"learning_rate": 2.1897539149888143e-05,
"loss": 0.7629,
"step": 188500
},
{
"epoch": 14.09,
"learning_rate": 2.1822967934377332e-05,
"loss": 0.758,
"step": 189000
},
{
"epoch": 14.13,
"learning_rate": 2.174854586129754e-05,
"loss": 0.7726,
"step": 189500
},
{
"epoch": 14.17,
"learning_rate": 2.1673974645786726e-05,
"loss": 0.765,
"step": 190000
},
{
"epoch": 14.21,
"learning_rate": 2.1599403430275912e-05,
"loss": 0.7639,
"step": 190500
},
{
"epoch": 14.24,
"learning_rate": 2.15248322147651e-05,
"loss": 0.7613,
"step": 191000
},
{
"epoch": 14.28,
"learning_rate": 2.145026099925429e-05,
"loss": 0.7624,
"step": 191500
},
{
"epoch": 14.32,
"learning_rate": 2.13758389261745e-05,
"loss": 0.7682,
"step": 192000
},
{
"epoch": 14.35,
"learning_rate": 2.1301267710663687e-05,
"loss": 0.7683,
"step": 192500
},
{
"epoch": 14.39,
"learning_rate": 2.1226696495152873e-05,
"loss": 0.7684,
"step": 193000
},
{
"epoch": 14.43,
"learning_rate": 2.115212527964206e-05,
"loss": 0.7561,
"step": 193500
},
{
"epoch": 14.47,
"learning_rate": 2.1077554064131247e-05,
"loss": 0.7672,
"step": 194000
},
{
"epoch": 14.5,
"learning_rate": 2.1002982848620435e-05,
"loss": 0.7658,
"step": 194500
},
{
"epoch": 14.54,
"learning_rate": 2.092841163310962e-05,
"loss": 0.7625,
"step": 195000
},
{
"epoch": 14.58,
"learning_rate": 2.0853840417598806e-05,
"loss": 0.757,
"step": 195500
},
{
"epoch": 14.62,
"learning_rate": 2.0779269202087995e-05,
"loss": 0.7596,
"step": 196000
},
{
"epoch": 14.65,
"learning_rate": 2.0704847129008204e-05,
"loss": 0.7603,
"step": 196500
},
{
"epoch": 14.69,
"learning_rate": 2.0630425055928413e-05,
"loss": 0.7666,
"step": 197000
},
{
"epoch": 14.73,
"learning_rate": 2.05558538404176e-05,
"loss": 0.7602,
"step": 197500
},
{
"epoch": 14.76,
"learning_rate": 2.0481282624906787e-05,
"loss": 0.7575,
"step": 198000
},
{
"epoch": 14.8,
"learning_rate": 2.0406711409395973e-05,
"loss": 0.7615,
"step": 198500
},
{
"epoch": 14.84,
"learning_rate": 2.033214019388516e-05,
"loss": 0.761,
"step": 199000
},
{
"epoch": 14.88,
"learning_rate": 2.025771812080537e-05,
"loss": 0.7628,
"step": 199500
},
{
"epoch": 14.91,
"learning_rate": 2.0183296047725576e-05,
"loss": 0.7629,
"step": 200000
},
{
"epoch": 14.95,
"learning_rate": 2.0108724832214765e-05,
"loss": 0.7623,
"step": 200500
},
{
"epoch": 14.99,
"learning_rate": 2.0034153616703954e-05,
"loss": 0.7528,
"step": 201000
},
{
"epoch": 15.03,
"learning_rate": 1.9959582401193142e-05,
"loss": 0.7458,
"step": 201500
},
{
"epoch": 15.06,
"learning_rate": 1.9885011185682328e-05,
"loss": 0.7596,
"step": 202000
},
{
"epoch": 15.1,
"learning_rate": 1.9810439970171517e-05,
"loss": 0.7547,
"step": 202500
},
{
"epoch": 15.14,
"learning_rate": 1.9735868754660702e-05,
"loss": 0.7581,
"step": 203000
},
{
"epoch": 15.18,
"learning_rate": 1.966129753914989e-05,
"loss": 0.7527,
"step": 203500
},
{
"epoch": 15.21,
"learning_rate": 1.9586726323639076e-05,
"loss": 0.7592,
"step": 204000
},
{
"epoch": 15.25,
"learning_rate": 1.9512304250559285e-05,
"loss": 0.7538,
"step": 204500
},
{
"epoch": 15.29,
"learning_rate": 1.943773303504847e-05,
"loss": 0.7466,
"step": 205000
},
{
"epoch": 15.32,
"learning_rate": 1.936316181953766e-05,
"loss": 0.7529,
"step": 205500
},
{
"epoch": 15.36,
"learning_rate": 1.9288590604026845e-05,
"loss": 0.7533,
"step": 206000
},
{
"epoch": 15.4,
"learning_rate": 1.9214019388516033e-05,
"loss": 0.7532,
"step": 206500
},
{
"epoch": 15.44,
"learning_rate": 1.9139597315436243e-05,
"loss": 0.7627,
"step": 207000
},
{
"epoch": 15.47,
"learning_rate": 1.906517524235645e-05,
"loss": 0.7514,
"step": 207500
},
{
"epoch": 15.51,
"learning_rate": 1.8990604026845637e-05,
"loss": 0.7564,
"step": 208000
},
{
"epoch": 15.55,
"learning_rate": 1.8916032811334826e-05,
"loss": 0.7587,
"step": 208500
},
{
"epoch": 15.59,
"learning_rate": 1.884146159582401e-05,
"loss": 0.7508,
"step": 209000
},
{
"epoch": 15.62,
"learning_rate": 1.87668903803132e-05,
"loss": 0.7443,
"step": 209500
},
{
"epoch": 15.66,
"learning_rate": 1.869231916480239e-05,
"loss": 0.7514,
"step": 210000
},
{
"epoch": 15.7,
"learning_rate": 1.8617747949291574e-05,
"loss": 0.7461,
"step": 210500
},
{
"epoch": 15.73,
"learning_rate": 1.8543176733780763e-05,
"loss": 0.7537,
"step": 211000
},
{
"epoch": 15.77,
"learning_rate": 1.8468605518269948e-05,
"loss": 0.7532,
"step": 211500
},
{
"epoch": 15.81,
"learning_rate": 1.8394034302759137e-05,
"loss": 0.7416,
"step": 212000
},
{
"epoch": 15.85,
"learning_rate": 1.8319463087248322e-05,
"loss": 0.7456,
"step": 212500
},
{
"epoch": 15.88,
"learning_rate": 1.824489187173751e-05,
"loss": 0.7477,
"step": 213000
},
{
"epoch": 15.92,
"learning_rate": 1.8170320656226696e-05,
"loss": 0.749,
"step": 213500
},
{
"epoch": 15.96,
"learning_rate": 1.8095749440715885e-05,
"loss": 0.7512,
"step": 214000
},
{
"epoch": 16.0,
"learning_rate": 1.8021327367636094e-05,
"loss": 0.7422,
"step": 214500
},
{
"epoch": 16.03,
"learning_rate": 1.794675615212528e-05,
"loss": 0.749,
"step": 215000
},
{
"epoch": 16.07,
"learning_rate": 1.787218493661447e-05,
"loss": 0.7409,
"step": 215500
},
{
"epoch": 16.11,
"learning_rate": 1.7797613721103654e-05,
"loss": 0.7411,
"step": 216000
},
{
"epoch": 16.14,
"learning_rate": 1.7723191648023863e-05,
"loss": 0.7397,
"step": 216500
},
{
"epoch": 16.18,
"learning_rate": 1.764862043251305e-05,
"loss": 0.7448,
"step": 217000
},
{
"epoch": 16.22,
"learning_rate": 1.757404921700224e-05,
"loss": 0.7482,
"step": 217500
},
{
"epoch": 16.26,
"learning_rate": 1.7499478001491426e-05,
"loss": 0.7399,
"step": 218000
},
{
"epoch": 16.29,
"learning_rate": 1.7424906785980614e-05,
"loss": 0.7347,
"step": 218500
},
{
"epoch": 16.33,
"learning_rate": 1.7350633855331844e-05,
"loss": 0.7431,
"step": 219000
},
{
"epoch": 16.37,
"learning_rate": 1.727606263982103e-05,
"loss": 0.7426,
"step": 219500
},
{
"epoch": 16.41,
"learning_rate": 1.7201491424310218e-05,
"loss": 0.7437,
"step": 220000
},
{
"epoch": 16.44,
"learning_rate": 1.7126920208799403e-05,
"loss": 0.7407,
"step": 220500
},
{
"epoch": 16.48,
"learning_rate": 1.7052348993288592e-05,
"loss": 0.7389,
"step": 221000
},
{
"epoch": 16.52,
"learning_rate": 1.6977777777777777e-05,
"loss": 0.7369,
"step": 221500
},
{
"epoch": 16.55,
"learning_rate": 1.6903206562266966e-05,
"loss": 0.7444,
"step": 222000
},
{
"epoch": 16.59,
"learning_rate": 1.682863534675615e-05,
"loss": 0.7385,
"step": 222500
},
{
"epoch": 16.63,
"learning_rate": 1.675421327367636e-05,
"loss": 0.7439,
"step": 223000
},
{
"epoch": 16.67,
"learning_rate": 1.667964205816555e-05,
"loss": 0.743,
"step": 223500
},
{
"epoch": 16.7,
"learning_rate": 1.6605070842654735e-05,
"loss": 0.7422,
"step": 224000
},
{
"epoch": 16.74,
"learning_rate": 1.6530499627143924e-05,
"loss": 0.7384,
"step": 224500
},
{
"epoch": 16.78,
"learning_rate": 1.645592841163311e-05,
"loss": 0.7388,
"step": 225000
},
{
"epoch": 16.82,
"learning_rate": 1.6381357196122298e-05,
"loss": 0.7384,
"step": 225500
},
{
"epoch": 16.85,
"learning_rate": 1.6306785980611486e-05,
"loss": 0.7387,
"step": 226000
},
{
"epoch": 16.89,
"learning_rate": 1.6232363907531696e-05,
"loss": 0.7306,
"step": 226500
},
{
"epoch": 16.93,
"learning_rate": 1.615779269202088e-05,
"loss": 0.7398,
"step": 227000
},
{
"epoch": 16.96,
"learning_rate": 1.608322147651007e-05,
"loss": 0.7371,
"step": 227500
},
{
"epoch": 17.0,
"learning_rate": 1.6008650260999255e-05,
"loss": 0.7357,
"step": 228000
},
{
"epoch": 17.04,
"learning_rate": 1.5934377330350488e-05,
"loss": 0.7357,
"step": 228500
},
{
"epoch": 17.08,
"learning_rate": 1.5859806114839673e-05,
"loss": 0.7397,
"step": 229000
},
{
"epoch": 17.11,
"learning_rate": 1.578523489932886e-05,
"loss": 0.735,
"step": 229500
},
{
"epoch": 17.15,
"learning_rate": 1.5710663683818047e-05,
"loss": 0.7345,
"step": 230000
},
{
"epoch": 17.19,
"learning_rate": 1.5636092468307233e-05,
"loss": 0.7339,
"step": 230500
},
{
"epoch": 17.23,
"learning_rate": 1.556152125279642e-05,
"loss": 0.7317,
"step": 231000
},
{
"epoch": 17.26,
"learning_rate": 1.5486950037285607e-05,
"loss": 0.7325,
"step": 231500
},
{
"epoch": 17.3,
"learning_rate": 1.5412378821774796e-05,
"loss": 0.7328,
"step": 232000
},
{
"epoch": 17.34,
"learning_rate": 1.5337956748695005e-05,
"loss": 0.7326,
"step": 232500
},
{
"epoch": 17.37,
"learning_rate": 1.526338553318419e-05,
"loss": 0.734,
"step": 233000
},
{
"epoch": 17.41,
"learning_rate": 1.5188814317673377e-05,
"loss": 0.7369,
"step": 233500
},
{
"epoch": 17.45,
"learning_rate": 1.5114243102162564e-05,
"loss": 0.7334,
"step": 234000
},
{
"epoch": 17.49,
"learning_rate": 1.5039671886651755e-05,
"loss": 0.73,
"step": 234500
},
{
"epoch": 17.52,
"learning_rate": 1.4965100671140942e-05,
"loss": 0.7294,
"step": 235000
},
{
"epoch": 17.56,
"learning_rate": 1.4890529455630129e-05,
"loss": 0.7238,
"step": 235500
},
{
"epoch": 17.6,
"learning_rate": 1.4815958240119316e-05,
"loss": 0.7297,
"step": 236000
},
{
"epoch": 17.64,
"learning_rate": 1.4741536167039525e-05,
"loss": 0.7347,
"step": 236500
},
{
"epoch": 17.67,
"learning_rate": 1.466696495152871e-05,
"loss": 0.7336,
"step": 237000
},
{
"epoch": 17.71,
"learning_rate": 1.459254287844892e-05,
"loss": 0.7238,
"step": 237500
},
{
"epoch": 17.75,
"learning_rate": 1.4517971662938106e-05,
"loss": 0.7348,
"step": 238000
},
{
"epoch": 17.79,
"learning_rate": 1.4443549589858315e-05,
"loss": 0.7348,
"step": 238500
},
{
"epoch": 17.82,
"learning_rate": 1.4368978374347503e-05,
"loss": 0.7221,
"step": 239000
},
{
"epoch": 17.86,
"learning_rate": 1.429440715883669e-05,
"loss": 0.7276,
"step": 239500
},
{
"epoch": 17.9,
"learning_rate": 1.4219835943325877e-05,
"loss": 0.725,
"step": 240000
},
{
"epoch": 17.93,
"learning_rate": 1.4145264727815064e-05,
"loss": 0.7271,
"step": 240500
},
{
"epoch": 17.97,
"learning_rate": 1.407069351230425e-05,
"loss": 0.7253,
"step": 241000
},
{
"epoch": 18.01,
"learning_rate": 1.3996122296793438e-05,
"loss": 0.7249,
"step": 241500
},
{
"epoch": 18.05,
"learning_rate": 1.3921551081282625e-05,
"loss": 0.7248,
"step": 242000
},
{
"epoch": 18.08,
"learning_rate": 1.3846979865771812e-05,
"loss": 0.7279,
"step": 242500
},
{
"epoch": 18.12,
"learning_rate": 1.3772408650261e-05,
"loss": 0.7313,
"step": 243000
},
{
"epoch": 18.16,
"learning_rate": 1.3697837434750188e-05,
"loss": 0.7249,
"step": 243500
},
{
"epoch": 18.2,
"learning_rate": 1.3623266219239375e-05,
"loss": 0.7154,
"step": 244000
},
{
"epoch": 18.23,
"learning_rate": 1.3548695003728562e-05,
"loss": 0.7206,
"step": 244500
},
{
"epoch": 18.27,
"learning_rate": 1.3474272930648771e-05,
"loss": 0.7222,
"step": 245000
},
{
"epoch": 18.31,
"learning_rate": 1.3399701715137958e-05,
"loss": 0.7212,
"step": 245500
},
{
"epoch": 18.34,
"learning_rate": 1.3325130499627145e-05,
"loss": 0.7207,
"step": 246000
},
{
"epoch": 18.38,
"learning_rate": 1.3250559284116332e-05,
"loss": 0.7236,
"step": 246500
},
{
"epoch": 18.42,
"learning_rate": 1.3176137211036541e-05,
"loss": 0.7235,
"step": 247000
},
{
"epoch": 18.46,
"learning_rate": 1.3101715137956749e-05,
"loss": 0.7207,
"step": 247500
},
{
"epoch": 18.49,
"learning_rate": 1.3027143922445936e-05,
"loss": 0.7201,
"step": 248000
},
{
"epoch": 18.53,
"learning_rate": 1.2952572706935123e-05,
"loss": 0.7212,
"step": 248500
},
{
"epoch": 18.57,
"learning_rate": 1.287800149142431e-05,
"loss": 0.7328,
"step": 249000
},
{
"epoch": 18.61,
"learning_rate": 1.2803430275913497e-05,
"loss": 0.7278,
"step": 249500
},
{
"epoch": 18.64,
"learning_rate": 1.2728859060402684e-05,
"loss": 0.7188,
"step": 250000
},
{
"epoch": 18.68,
"learning_rate": 1.2654287844891871e-05,
"loss": 0.7212,
"step": 250500
},
{
"epoch": 18.72,
"learning_rate": 1.2579716629381062e-05,
"loss": 0.7273,
"step": 251000
},
{
"epoch": 18.75,
"learning_rate": 1.2505294556301269e-05,
"loss": 0.718,
"step": 251500
},
{
"epoch": 18.79,
"learning_rate": 1.2430723340790456e-05,
"loss": 0.7256,
"step": 252000
},
{
"epoch": 18.83,
"learning_rate": 1.2356152125279641e-05,
"loss": 0.7269,
"step": 252500
},
{
"epoch": 18.87,
"learning_rate": 1.228173005219985e-05,
"loss": 0.7144,
"step": 253000
},
{
"epoch": 18.9,
"learning_rate": 1.220730797912006e-05,
"loss": 0.7169,
"step": 253500
},
{
"epoch": 18.94,
"learning_rate": 1.2132736763609247e-05,
"loss": 0.7225,
"step": 254000
},
{
"epoch": 18.98,
"learning_rate": 1.2058165548098435e-05,
"loss": 0.7198,
"step": 254500
},
{
"epoch": 19.02,
"learning_rate": 1.1983594332587622e-05,
"loss": 0.7165,
"step": 255000
},
{
"epoch": 19.05,
"learning_rate": 1.190902311707681e-05,
"loss": 0.7114,
"step": 255500
},
{
"epoch": 19.09,
"learning_rate": 1.1834451901565997e-05,
"loss": 0.7191,
"step": 256000
},
{
"epoch": 19.13,
"learning_rate": 1.1759880686055184e-05,
"loss": 0.7164,
"step": 256500
},
{
"epoch": 19.16,
"learning_rate": 1.168530947054437e-05,
"loss": 0.7177,
"step": 257000
},
{
"epoch": 19.2,
"learning_rate": 1.1610738255033558e-05,
"loss": 0.7125,
"step": 257500
},
{
"epoch": 19.24,
"learning_rate": 1.1536167039522745e-05,
"loss": 0.7177,
"step": 258000
},
{
"epoch": 19.28,
"learning_rate": 1.1461595824011932e-05,
"loss": 0.7107,
"step": 258500
},
{
"epoch": 19.31,
"learning_rate": 1.1387024608501119e-05,
"loss": 0.7199,
"step": 259000
},
{
"epoch": 19.35,
"learning_rate": 1.1312453392990306e-05,
"loss": 0.7197,
"step": 259500
},
{
"epoch": 19.39,
"learning_rate": 1.1238031319910515e-05,
"loss": 0.7166,
"step": 260000
},
{
"epoch": 19.43,
"learning_rate": 1.1163460104399702e-05,
"loss": 0.7153,
"step": 260500
},
{
"epoch": 19.46,
"learning_rate": 1.108888888888889e-05,
"loss": 0.7176,
"step": 261000
},
{
"epoch": 19.5,
"learning_rate": 1.1014466815809098e-05,
"loss": 0.7143,
"step": 261500
},
{
"epoch": 19.54,
"learning_rate": 1.0940044742729306e-05,
"loss": 0.7167,
"step": 262000
},
{
"epoch": 19.57,
"learning_rate": 1.0865473527218494e-05,
"loss": 0.7157,
"step": 262500
},
{
"epoch": 19.61,
"learning_rate": 1.0790902311707682e-05,
"loss": 0.7107,
"step": 263000
},
{
"epoch": 19.65,
"learning_rate": 1.0716331096196869e-05,
"loss": 0.7156,
"step": 263500
},
{
"epoch": 19.69,
"learning_rate": 1.0641909023117078e-05,
"loss": 0.7164,
"step": 264000
},
{
"epoch": 19.72,
"learning_rate": 1.0567337807606265e-05,
"loss": 0.7131,
"step": 264500
},
{
"epoch": 19.76,
"learning_rate": 1.0492766592095452e-05,
"loss": 0.7158,
"step": 265000
},
{
"epoch": 19.8,
"learning_rate": 1.0418195376584639e-05,
"loss": 0.7129,
"step": 265500
},
{
"epoch": 19.84,
"learning_rate": 1.0343624161073826e-05,
"loss": 0.7119,
"step": 266000
},
{
"epoch": 19.87,
"learning_rate": 1.0269052945563013e-05,
"loss": 0.7208,
"step": 266500
},
{
"epoch": 19.91,
"learning_rate": 1.01944817300522e-05,
"loss": 0.7132,
"step": 267000
},
{
"epoch": 19.95,
"learning_rate": 1.0119910514541387e-05,
"loss": 0.715,
"step": 267500
},
{
"epoch": 19.98,
"learning_rate": 1.0045339299030574e-05,
"loss": 0.7125,
"step": 268000
},
{
"epoch": 20.02,
"learning_rate": 9.970768083519761e-06,
"loss": 0.7126,
"step": 268500
},
{
"epoch": 20.06,
"learning_rate": 9.89634601043997e-06,
"loss": 0.7056,
"step": 269000
},
{
"epoch": 20.1,
"learning_rate": 9.821774794929157e-06,
"loss": 0.7101,
"step": 269500
},
{
"epoch": 20.13,
"learning_rate": 9.747203579418344e-06,
"loss": 0.7103,
"step": 270000
},
{
"epoch": 20.17,
"learning_rate": 9.672632363907533e-06,
"loss": 0.7109,
"step": 270500
},
{
"epoch": 20.21,
"learning_rate": 9.59806114839672e-06,
"loss": 0.707,
"step": 271000
},
{
"epoch": 20.25,
"learning_rate": 9.52363907531693e-06,
"loss": 0.7121,
"step": 271500
},
{
"epoch": 20.28,
"learning_rate": 9.449067859806115e-06,
"loss": 0.7088,
"step": 272000
},
{
"epoch": 20.32,
"learning_rate": 9.374496644295302e-06,
"loss": 0.7078,
"step": 272500
},
{
"epoch": 20.36,
"learning_rate": 9.299925428784489e-06,
"loss": 0.7094,
"step": 273000
},
{
"epoch": 20.4,
"learning_rate": 9.225354213273676e-06,
"loss": 0.7103,
"step": 273500
},
{
"epoch": 20.43,
"learning_rate": 9.150932140193885e-06,
"loss": 0.7097,
"step": 274000
},
{
"epoch": 20.47,
"learning_rate": 9.076360924683072e-06,
"loss": 0.7092,
"step": 274500
},
{
"epoch": 20.51,
"learning_rate": 9.001938851603281e-06,
"loss": 0.7092,
"step": 275000
},
{
"epoch": 20.54,
"learning_rate": 8.92736763609247e-06,
"loss": 0.7061,
"step": 275500
},
{
"epoch": 20.58,
"learning_rate": 8.852796420581657e-06,
"loss": 0.7076,
"step": 276000
},
{
"epoch": 20.62,
"learning_rate": 8.778225205070844e-06,
"loss": 0.7124,
"step": 276500
},
{
"epoch": 20.66,
"learning_rate": 8.70365398956003e-06,
"loss": 0.7051,
"step": 277000
},
{
"epoch": 20.69,
"learning_rate": 8.629082774049217e-06,
"loss": 0.706,
"step": 277500
},
{
"epoch": 20.73,
"learning_rate": 8.554511558538404e-06,
"loss": 0.7043,
"step": 278000
},
{
"epoch": 20.77,
"learning_rate": 8.47994034302759e-06,
"loss": 0.7081,
"step": 278500
},
{
"epoch": 20.81,
"learning_rate": 8.40536912751678e-06,
"loss": 0.7048,
"step": 279000
},
{
"epoch": 20.84,
"learning_rate": 8.330797912005966e-06,
"loss": 0.715,
"step": 279500
},
{
"epoch": 20.88,
"learning_rate": 8.256226696495154e-06,
"loss": 0.7035,
"step": 280000
},
{
"epoch": 20.92,
"learning_rate": 8.18165548098434e-06,
"loss": 0.7097,
"step": 280500
},
{
"epoch": 20.95,
"learning_rate": 8.107084265473528e-06,
"loss": 0.706,
"step": 281000
},
{
"epoch": 20.99,
"learning_rate": 8.032662192393737e-06,
"loss": 0.7003,
"step": 281500
},
{
"epoch": 21.03,
"learning_rate": 7.958090976882924e-06,
"loss": 0.7037,
"step": 282000
},
{
"epoch": 21.07,
"learning_rate": 7.883668903803131e-06,
"loss": 0.7066,
"step": 282500
},
{
"epoch": 21.1,
"learning_rate": 7.809097688292318e-06,
"loss": 0.7031,
"step": 283000
},
{
"epoch": 21.14,
"learning_rate": 7.734526472781507e-06,
"loss": 0.6978,
"step": 283500
},
{
"epoch": 21.18,
"learning_rate": 7.659955257270694e-06,
"loss": 0.7041,
"step": 284000
},
{
"epoch": 21.22,
"learning_rate": 7.585533184190903e-06,
"loss": 0.7044,
"step": 284500
},
{
"epoch": 21.25,
"learning_rate": 7.51096196868009e-06,
"loss": 0.6957,
"step": 285000
},
{
"epoch": 21.29,
"learning_rate": 7.436390753169277e-06,
"loss": 0.702,
"step": 285500
},
{
"epoch": 21.33,
"learning_rate": 7.3619686800894855e-06,
"loss": 0.7002,
"step": 286000
},
{
"epoch": 21.36,
"learning_rate": 7.287397464578673e-06,
"loss": 0.7031,
"step": 286500
},
{
"epoch": 21.4,
"learning_rate": 7.21282624906786e-06,
"loss": 0.7073,
"step": 287000
},
{
"epoch": 21.44,
"learning_rate": 7.138255033557047e-06,
"loss": 0.706,
"step": 287500
},
{
"epoch": 21.48,
"learning_rate": 7.063683818046235e-06,
"loss": 0.7052,
"step": 288000
},
{
"epoch": 21.51,
"learning_rate": 6.989112602535422e-06,
"loss": 0.7019,
"step": 288500
},
{
"epoch": 21.55,
"learning_rate": 6.914690529455631e-06,
"loss": 0.6996,
"step": 289000
},
{
"epoch": 21.59,
"learning_rate": 6.840119313944818e-06,
"loss": 0.7,
"step": 289500
},
{
"epoch": 21.63,
"learning_rate": 6.765548098434005e-06,
"loss": 0.7037,
"step": 290000
},
{
"epoch": 21.66,
"learning_rate": 6.690976882923192e-06,
"loss": 0.7007,
"step": 290500
},
{
"epoch": 21.7,
"learning_rate": 6.616405667412379e-06,
"loss": 0.7031,
"step": 291000
},
{
"epoch": 21.74,
"learning_rate": 6.541834451901565e-06,
"loss": 0.7011,
"step": 291500
},
{
"epoch": 21.77,
"learning_rate": 6.467263236390754e-06,
"loss": 0.7008,
"step": 292000
},
{
"epoch": 21.81,
"learning_rate": 6.392692020879941e-06,
"loss": 0.696,
"step": 292500
},
{
"epoch": 21.85,
"learning_rate": 6.31826994780015e-06,
"loss": 0.6973,
"step": 293000
},
{
"epoch": 21.89,
"learning_rate": 6.243698732289336e-06,
"loss": 0.6995,
"step": 293500
},
{
"epoch": 21.92,
"learning_rate": 6.1691275167785235e-06,
"loss": 0.6985,
"step": 294000
},
{
"epoch": 21.96,
"learning_rate": 6.0945563012677105e-06,
"loss": 0.703,
"step": 294500
},
{
"epoch": 22.0,
"learning_rate": 6.0199850857568984e-06,
"loss": 0.6983,
"step": 295000
},
{
"epoch": 22.04,
"learning_rate": 5.9454138702460855e-06,
"loss": 0.6993,
"step": 295500
},
{
"epoch": 22.07,
"learning_rate": 5.8708426547352726e-06,
"loss": 0.6923,
"step": 296000
},
{
"epoch": 22.11,
"learning_rate": 5.79627143922446e-06,
"loss": 0.6993,
"step": 296500
},
{
"epoch": 22.15,
"learning_rate": 5.721700223713647e-06,
"loss": 0.6927,
"step": 297000
},
{
"epoch": 22.18,
"learning_rate": 5.647129008202834e-06,
"loss": 0.6942,
"step": 297500
},
{
"epoch": 22.22,
"learning_rate": 5.572706935123043e-06,
"loss": 0.6976,
"step": 298000
},
{
"epoch": 22.26,
"learning_rate": 5.49813571961223e-06,
"loss": 0.6943,
"step": 298500
},
{
"epoch": 22.3,
"learning_rate": 5.423564504101417e-06,
"loss": 0.7016,
"step": 299000
},
{
"epoch": 22.33,
"learning_rate": 5.348993288590604e-06,
"loss": 0.6978,
"step": 299500
},
{
"epoch": 22.37,
"learning_rate": 5.274422073079791e-06,
"loss": 0.6952,
"step": 300000
},
{
"epoch": 22.41,
"learning_rate": 5.199850857568979e-06,
"loss": 0.6971,
"step": 300500
},
{
"epoch": 22.45,
"learning_rate": 5.125279642058166e-06,
"loss": 0.6997,
"step": 301000
},
{
"epoch": 22.48,
"learning_rate": 5.050708426547353e-06,
"loss": 0.6925,
"step": 301500
},
{
"epoch": 22.52,
"learning_rate": 4.976137211036539e-06,
"loss": 0.692,
"step": 302000
},
{
"epoch": 22.56,
"learning_rate": 4.901715137956749e-06,
"loss": 0.7017,
"step": 302500
},
{
"epoch": 22.59,
"learning_rate": 4.827143922445936e-06,
"loss": 0.6978,
"step": 303000
},
{
"epoch": 22.63,
"learning_rate": 4.7525727069351234e-06,
"loss": 0.698,
"step": 303500
},
{
"epoch": 22.67,
"learning_rate": 4.6780014914243105e-06,
"loss": 0.6987,
"step": 304000
},
{
"epoch": 22.71,
"learning_rate": 4.603430275913498e-06,
"loss": 0.6922,
"step": 304500
},
{
"epoch": 22.74,
"learning_rate": 4.529008202833707e-06,
"loss": 0.693,
"step": 305000
},
{
"epoch": 22.78,
"learning_rate": 4.454436987322894e-06,
"loss": 0.6959,
"step": 305500
},
{
"epoch": 22.82,
"learning_rate": 4.379865771812081e-06,
"loss": 0.6993,
"step": 306000
},
{
"epoch": 22.86,
"learning_rate": 4.305294556301268e-06,
"loss": 0.6949,
"step": 306500
},
{
"epoch": 22.89,
"learning_rate": 4.230872483221477e-06,
"loss": 0.6936,
"step": 307000
},
{
"epoch": 22.93,
"learning_rate": 4.156301267710664e-06,
"loss": 0.6956,
"step": 307500
},
{
"epoch": 22.97,
"learning_rate": 4.081730052199851e-06,
"loss": 0.6929,
"step": 308000
},
{
"epoch": 23.01,
"learning_rate": 4.007158836689038e-06,
"loss": 0.6897,
"step": 308500
},
{
"epoch": 23.04,
"learning_rate": 3.932736763609246e-06,
"loss": 0.6975,
"step": 309000
},
{
"epoch": 23.08,
"learning_rate": 3.858165548098434e-06,
"loss": 0.6971,
"step": 309500
},
{
"epoch": 23.12,
"learning_rate": 3.7835943325876213e-06,
"loss": 0.6965,
"step": 310000
},
{
"epoch": 23.15,
"learning_rate": 3.70917225950783e-06,
"loss": 0.697,
"step": 310500
},
{
"epoch": 23.19,
"learning_rate": 3.634601043997017e-06,
"loss": 0.6885,
"step": 311000
},
{
"epoch": 23.23,
"learning_rate": 3.560029828486205e-06,
"loss": 0.6854,
"step": 311500
},
{
"epoch": 23.27,
"learning_rate": 3.4854586129753916e-06,
"loss": 0.6938,
"step": 312000
},
{
"epoch": 23.3,
"learning_rate": 3.4108873974645787e-06,
"loss": 0.6905,
"step": 312500
},
{
"epoch": 23.34,
"learning_rate": 3.3364653243847873e-06,
"loss": 0.6927,
"step": 313000
},
{
"epoch": 23.38,
"learning_rate": 3.2618941088739744e-06,
"loss": 0.693,
"step": 313500
},
{
"epoch": 23.42,
"learning_rate": 3.1873228933631623e-06,
"loss": 0.6894,
"step": 314000
},
{
"epoch": 23.45,
"learning_rate": 3.112751677852349e-06,
"loss": 0.6899,
"step": 314500
},
{
"epoch": 23.49,
"learning_rate": 3.0381804623415364e-06,
"loss": 0.6924,
"step": 315000
},
{
"epoch": 23.53,
"learning_rate": 2.9636092468307235e-06,
"loss": 0.6936,
"step": 315500
},
{
"epoch": 23.56,
"learning_rate": 2.8890380313199106e-06,
"loss": 0.6939,
"step": 316000
},
{
"epoch": 23.6,
"learning_rate": 2.8144668158090976e-06,
"loss": 0.6975,
"step": 316500
},
{
"epoch": 23.64,
"learning_rate": 2.7400447427293063e-06,
"loss": 0.6905,
"step": 317000
},
{
"epoch": 23.68,
"learning_rate": 2.6654735272184938e-06,
"loss": 0.6908,
"step": 317500
},
{
"epoch": 23.71,
"learning_rate": 2.5909023117076813e-06,
"loss": 0.6891,
"step": 318000
},
{
"epoch": 23.75,
"learning_rate": 2.516331096196868e-06,
"loss": 0.6911,
"step": 318500
},
{
"epoch": 23.79,
"learning_rate": 2.4417598806860554e-06,
"loss": 0.6956,
"step": 319000
},
{
"epoch": 23.83,
"learning_rate": 2.3671886651752425e-06,
"loss": 0.6941,
"step": 319500
},
{
"epoch": 23.86,
"learning_rate": 2.292766592095451e-06,
"loss": 0.6864,
"step": 320000
},
{
"epoch": 23.9,
"learning_rate": 2.2181953765846386e-06,
"loss": 0.6959,
"step": 320500
},
{
"epoch": 23.94,
"learning_rate": 2.1436241610738253e-06,
"loss": 0.689,
"step": 321000
},
{
"epoch": 23.97,
"learning_rate": 2.0690529455630127e-06,
"loss": 0.6906,
"step": 321500
},
{
"epoch": 24.01,
"learning_rate": 1.9944817300522002e-06,
"loss": 0.6921,
"step": 322000
},
{
"epoch": 24.05,
"learning_rate": 1.919910514541387e-06,
"loss": 0.6915,
"step": 322500
},
{
"epoch": 24.09,
"learning_rate": 1.8454884414615957e-06,
"loss": 0.6957,
"step": 323000
},
{
"epoch": 24.12,
"learning_rate": 1.7709172259507832e-06,
"loss": 0.6892,
"step": 323500
},
{
"epoch": 24.16,
"learning_rate": 1.69634601043997e-06,
"loss": 0.6875,
"step": 324000
},
{
"epoch": 24.2,
"learning_rate": 1.6217747949291574e-06,
"loss": 0.6899,
"step": 324500
},
{
"epoch": 24.24,
"learning_rate": 1.5472035794183446e-06,
"loss": 0.6888,
"step": 325000
},
{
"epoch": 24.27,
"learning_rate": 1.4726323639075317e-06,
"loss": 0.6921,
"step": 325500
},
{
"epoch": 24.31,
"learning_rate": 1.3980611483967188e-06,
"loss": 0.6866,
"step": 326000
},
{
"epoch": 24.35,
"learning_rate": 1.3234899328859063e-06,
"loss": 0.6873,
"step": 326500
},
{
"epoch": 24.38,
"learning_rate": 1.249067859806115e-06,
"loss": 0.6859,
"step": 327000
},
{
"epoch": 24.42,
"learning_rate": 1.1744966442953022e-06,
"loss": 0.6825,
"step": 327500
},
{
"epoch": 24.46,
"learning_rate": 1.0999254287844893e-06,
"loss": 0.6904,
"step": 328000
},
{
"epoch": 24.5,
"learning_rate": 1.0253542132736763e-06,
"loss": 0.6941,
"step": 328500
},
{
"epoch": 24.53,
"learning_rate": 9.507829977628635e-07,
"loss": 0.69,
"step": 329000
},
{
"epoch": 24.57,
"learning_rate": 8.762117822520507e-07,
"loss": 0.6842,
"step": 329500
},
{
"epoch": 24.61,
"learning_rate": 8.01640566741238e-07,
"loss": 0.6931,
"step": 330000
},
{
"epoch": 24.65,
"learning_rate": 7.27069351230425e-07,
"loss": 0.6869,
"step": 330500
},
{
"epoch": 24.68,
"learning_rate": 6.527964205816555e-07,
"loss": 0.6927,
"step": 331000
},
{
"epoch": 24.72,
"learning_rate": 5.782252050708427e-07,
"loss": 0.6846,
"step": 331500
},
{
"epoch": 24.76,
"learning_rate": 5.036539895600298e-07,
"loss": 0.6932,
"step": 332000
},
{
"epoch": 24.79,
"learning_rate": 4.29082774049217e-07,
"loss": 0.6867,
"step": 332500
},
{
"epoch": 24.83,
"learning_rate": 3.545115585384042e-07,
"loss": 0.6883,
"step": 333000
},
{
"epoch": 24.87,
"learning_rate": 2.799403430275914e-07,
"loss": 0.6826,
"step": 333500
},
{
"epoch": 24.91,
"learning_rate": 2.053691275167785e-07,
"loss": 0.6882,
"step": 334000
},
{
"epoch": 24.94,
"learning_rate": 1.3109619686800894e-07,
"loss": 0.6854,
"step": 334500
},
{
"epoch": 24.98,
"learning_rate": 5.652498135719612e-08,
"loss": 0.6878,
"step": 335000
},
{
"epoch": 25.0,
"step": 335250,
"total_flos": 2.835875553205027e+18,
"train_loss": 0.891669009442653,
"train_runtime": 467828.9629,
"train_samples_per_second": 22.932,
"train_steps_per_second": 0.717
}
],
"logging_steps": 500,
"max_steps": 335250,
"num_train_epochs": 25,
"save_steps": 10000,
"total_flos": 2.835875553205027e+18,
"trial_name": null,
"trial_params": null
}