{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 4358,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0002294630564479119, "grad_norm": 16.915573515504455, "learning_rate": 4.587155963302753e-08, "loss": 1.3608, "step": 1 },
    { "epoch": 0.0011473152822395595, "grad_norm": 16.57824498398554, "learning_rate": 2.2935779816513764e-07, "loss": 1.3475, "step": 5 },
    { "epoch": 0.002294630564479119, "grad_norm": 13.391120630574697, "learning_rate": 4.587155963302753e-07, "loss": 1.3563, "step": 10 },
    { "epoch": 0.0034419458467186783, "grad_norm": 7.451292451267116, "learning_rate": 6.880733944954129e-07, "loss": 1.2802, "step": 15 },
    { "epoch": 0.004589261128958238, "grad_norm": 5.153183243132574, "learning_rate": 9.174311926605506e-07, "loss": 1.2213, "step": 20 },
    { "epoch": 0.005736576411197797, "grad_norm": 4.42706429835042, "learning_rate": 1.1467889908256882e-06, "loss": 1.1605, "step": 25 },
    { "epoch": 0.006883891693437357, "grad_norm": 4.3909586491865, "learning_rate": 1.3761467889908258e-06, "loss": 1.1432, "step": 30 },
    { "epoch": 0.008031206975676917, "grad_norm": 3.635568351265977, "learning_rate": 1.6055045871559635e-06, "loss": 1.1287, "step": 35 },
    { "epoch": 0.009178522257916476, "grad_norm": 3.6782219530388676, "learning_rate": 1.8348623853211011e-06, "loss": 1.1319, "step": 40 },
    { "epoch": 0.010325837540156035, "grad_norm": 3.9153495168079786, "learning_rate": 2.064220183486239e-06, "loss": 1.1259, "step": 45 },
    { "epoch": 0.011473152822395595, "grad_norm": 3.9184448958021036, "learning_rate": 2.2935779816513764e-06, "loss": 1.1435, "step": 50 },
    { "epoch": 0.012620468104635154, "grad_norm": 3.686838030224446, "learning_rate": 2.522935779816514e-06, "loss": 1.1297, "step": 55 },
    { "epoch": 0.013767783386874713, "grad_norm": 3.849747711258154, "learning_rate": 2.7522935779816517e-06, "loss": 1.1342, "step": 60 },
    { "epoch": 0.014915098669114273, "grad_norm": 3.6560526277238443, "learning_rate": 2.981651376146789e-06, "loss": 1.113, "step": 65 },
    { "epoch": 0.016062413951353834, "grad_norm": 4.12272475395816, "learning_rate": 3.211009174311927e-06, "loss": 1.1367, "step": 70 },
    { "epoch": 0.01720972923359339, "grad_norm": 3.6689121561670825, "learning_rate": 3.4403669724770644e-06, "loss": 1.1354, "step": 75 },
    { "epoch": 0.018357044515832952, "grad_norm": 3.5167783836221838, "learning_rate": 3.6697247706422022e-06, "loss": 1.1516, "step": 80 },
    { "epoch": 0.01950435979807251, "grad_norm": 3.6409760141175, "learning_rate": 3.89908256880734e-06, "loss": 1.1447, "step": 85 },
    { "epoch": 0.02065167508031207, "grad_norm": 3.5724528502767567, "learning_rate": 4.128440366972478e-06, "loss": 1.1103, "step": 90 },
    { "epoch": 0.02179899036255163, "grad_norm": 4.387301576122793, "learning_rate": 4.357798165137615e-06, "loss": 1.148, "step": 95 },
    { "epoch": 0.02294630564479119, "grad_norm": 3.5739499775971795, "learning_rate": 4.587155963302753e-06, "loss": 1.1412, "step": 100 },
    { "epoch": 0.024093620927030747, "grad_norm": 3.7629714710136244, "learning_rate": 4.816513761467891e-06, "loss": 1.1386, "step": 105 },
    { "epoch": 0.025240936209270308, "grad_norm": 3.56307250426267, "learning_rate": 5.045871559633028e-06, "loss": 1.1265, "step": 110 },
    { "epoch": 0.026388251491509866, "grad_norm": 3.7511673022298617, "learning_rate": 5.275229357798165e-06, "loss": 1.1331, "step": 115 },
    { "epoch": 0.027535566773749427, "grad_norm": 3.779191425409107, "learning_rate": 5.504587155963303e-06, "loss": 1.1324, "step": 120 },
    { "epoch": 0.028682882055988984, "grad_norm": 3.5751301446097252, "learning_rate": 5.733944954128441e-06, "loss": 1.1409, "step": 125 },
    { "epoch": 0.029830197338228545, "grad_norm": 3.7113311883773368, "learning_rate": 5.963302752293578e-06, "loss": 1.1522, "step": 130 },
    { "epoch": 0.030977512620468106, "grad_norm": 3.521541660293309, "learning_rate": 6.192660550458715e-06, "loss": 1.1657, "step": 135 },
    { "epoch": 0.03212482790270767, "grad_norm": 3.439820085380388, "learning_rate": 6.422018348623854e-06, "loss": 1.1607, "step": 140 },
    { "epoch": 0.03327214318494722, "grad_norm": 3.4447742850994842, "learning_rate": 6.651376146788992e-06, "loss": 1.1363, "step": 145 },
    { "epoch": 0.03441945846718678, "grad_norm": 3.5236191397811742, "learning_rate": 6.880733944954129e-06, "loss": 1.138, "step": 150 },
    { "epoch": 0.03556677374942634, "grad_norm": 3.3663301898503017, "learning_rate": 7.110091743119267e-06, "loss": 1.1574, "step": 155 },
    { "epoch": 0.036714089031665904, "grad_norm": 3.360850229705706, "learning_rate": 7.3394495412844045e-06, "loss": 1.133, "step": 160 },
    { "epoch": 0.03786140431390546, "grad_norm": 3.4791313578433702, "learning_rate": 7.568807339449542e-06, "loss": 1.1543, "step": 165 },
    { "epoch": 0.03900871959614502, "grad_norm": 3.7004977570629554, "learning_rate": 7.79816513761468e-06, "loss": 1.1934, "step": 170 },
    { "epoch": 0.04015603487838458, "grad_norm": 3.8223961646506903, "learning_rate": 8.027522935779817e-06, "loss": 1.1659, "step": 175 },
    { "epoch": 0.04130335016062414, "grad_norm": 3.55532539649572, "learning_rate": 8.256880733944956e-06, "loss": 1.1128, "step": 180 },
    { "epoch": 0.042450665442863696, "grad_norm": 3.6076069307268015, "learning_rate": 8.486238532110093e-06, "loss": 1.1569, "step": 185 },
    { "epoch": 0.04359798072510326, "grad_norm": 3.429437490724809, "learning_rate": 8.71559633027523e-06, "loss": 1.1521, "step": 190 },
    { "epoch": 0.04474529600734282, "grad_norm": 3.234909828239376, "learning_rate": 8.944954128440367e-06, "loss": 1.1438, "step": 195 },
    { "epoch": 0.04589261128958238, "grad_norm": 3.3130400141238834, "learning_rate": 9.174311926605506e-06, "loss": 1.1706, "step": 200 },
    { "epoch": 0.04703992657182194, "grad_norm": 3.7263058619472673, "learning_rate": 9.403669724770643e-06, "loss": 1.1876, "step": 205 },
    { "epoch": 0.048187241854061494, "grad_norm": 3.4658866154835373, "learning_rate": 9.633027522935781e-06, "loss": 1.153, "step": 210 },
    { "epoch": 0.049334557136301055, "grad_norm": 3.4639517157270086, "learning_rate": 9.862385321100918e-06, "loss": 1.185, "step": 215 },
    { "epoch": 0.050481872418540616, "grad_norm": 3.3739187071512937, "learning_rate": 1.0091743119266055e-05, "loss": 1.1948, "step": 220 },
    { "epoch": 0.05162918770078018, "grad_norm": 3.3478939956905363, "learning_rate": 1.0321100917431192e-05, "loss": 1.1694, "step": 225 },
    { "epoch": 0.05277650298301973, "grad_norm": 3.4368659867072773, "learning_rate": 1.055045871559633e-05, "loss": 1.1484, "step": 230 },
    { "epoch": 0.05392381826525929, "grad_norm": 3.2936522686474294, "learning_rate": 1.077981651376147e-05, "loss": 1.199, "step": 235 },
    { "epoch": 0.05507113354749885, "grad_norm": 3.0900646767838165, "learning_rate": 1.1009174311926607e-05, "loss": 1.1961, "step": 240 },
    { "epoch": 0.056218448829738414, "grad_norm": 3.422687599705482, "learning_rate": 1.1238532110091744e-05, "loss": 1.1988, "step": 245 },
    { "epoch": 0.05736576411197797, "grad_norm": 3.3144909495939103, "learning_rate": 1.1467889908256882e-05, "loss": 1.195, "step": 250 },
    { "epoch": 0.05851307939421753, "grad_norm": 3.309877252453861, "learning_rate": 1.169724770642202e-05, "loss": 1.1785, "step": 255 },
    { "epoch": 0.05966039467645709, "grad_norm": 3.280922712269594, "learning_rate": 1.1926605504587156e-05, "loss": 1.1776, "step": 260 },
    { "epoch": 0.06080770995869665, "grad_norm": 3.341721826593927, "learning_rate": 1.2155963302752293e-05, "loss": 1.1948, "step": 265 },
    { "epoch": 0.06195502524093621, "grad_norm": 3.475868431178915, "learning_rate": 1.238532110091743e-05, "loss": 1.1768, "step": 270 },
    { "epoch": 0.06310234052317577, "grad_norm": 3.3610034239289153, "learning_rate": 1.261467889908257e-05, "loss": 1.1789, "step": 275 },
    { "epoch": 0.06424965580541533, "grad_norm": 3.6114556514756755, "learning_rate": 1.2844036697247708e-05, "loss": 1.1839, "step": 280 },
    { "epoch": 0.06539697108765488, "grad_norm": 3.5383966049543765, "learning_rate": 1.3073394495412845e-05, "loss": 1.1732, "step": 285 },
    { "epoch": 0.06654428636989444, "grad_norm": 3.3264744648376805, "learning_rate": 1.3302752293577984e-05, "loss": 1.195, "step": 290 },
    { "epoch": 0.067691601652134, "grad_norm": 3.242818454268948, "learning_rate": 1.353211009174312e-05, "loss": 1.2111, "step": 295 },
    { "epoch": 0.06883891693437356, "grad_norm": 3.460258415056196, "learning_rate": 1.3761467889908258e-05, "loss": 1.1993, "step": 300 },
    { "epoch": 0.06998623221661313, "grad_norm": 3.695239807114153, "learning_rate": 1.3990825688073395e-05, "loss": 1.1888, "step": 305 },
    { "epoch": 0.07113354749885269, "grad_norm": 3.107063192268371, "learning_rate": 1.4220183486238533e-05, "loss": 1.1733, "step": 310 },
    { "epoch": 0.07228086278109225, "grad_norm": 3.2028941831424036, "learning_rate": 1.4449541284403672e-05, "loss": 1.2139, "step": 315 },
    { "epoch": 0.07342817806333181, "grad_norm": 3.066486456903709, "learning_rate": 1.4678899082568809e-05, "loss": 1.2, "step": 320 },
    { "epoch": 0.07457549334557137, "grad_norm": 4.414463221911407, "learning_rate": 1.4908256880733946e-05, "loss": 1.2214, "step": 325 },
    { "epoch": 0.07572280862781092, "grad_norm": 4.982537045862733, "learning_rate": 1.5137614678899085e-05, "loss": 1.1999, "step": 330 },
    { "epoch": 0.07687012391005048, "grad_norm": 3.339016451684457, "learning_rate": 1.536697247706422e-05, "loss": 1.2257, "step": 335 },
    { "epoch": 0.07801743919229004, "grad_norm": 3.3426471175954022, "learning_rate": 1.559633027522936e-05, "loss": 1.1864, "step": 340 },
    { "epoch": 0.0791647544745296, "grad_norm": 3.151247165643508, "learning_rate": 1.5825688073394497e-05, "loss": 1.2004, "step": 345 },
    { "epoch": 0.08031206975676916, "grad_norm": 3.398589476409347, "learning_rate": 1.6055045871559634e-05, "loss": 1.2106, "step": 350 },
    { "epoch": 0.08145938503900872, "grad_norm": 3.4252971557238103, "learning_rate": 1.628440366972477e-05, "loss": 1.2156, "step": 355 },
    { "epoch": 0.08260670032124828, "grad_norm": 3.331446919185167, "learning_rate": 1.6513761467889912e-05, "loss": 1.2221, "step": 360 },
    { "epoch": 0.08375401560348784, "grad_norm": 3.2290984121569073, "learning_rate": 1.674311926605505e-05, "loss": 1.2287, "step": 365 },
    { "epoch": 0.08490133088572739, "grad_norm": 3.260527699936813, "learning_rate": 1.6972477064220186e-05, "loss": 1.1927, "step": 370 },
    { "epoch": 0.08604864616796695, "grad_norm": 3.290227145809032, "learning_rate": 1.7201834862385323e-05, "loss": 1.2177, "step": 375 },
    { "epoch": 0.08719596145020651, "grad_norm": 3.3351224170083884, "learning_rate": 1.743119266055046e-05, "loss": 1.2332, "step": 380 },
    { "epoch": 0.08834327673244607, "grad_norm": 3.0987470046595944, "learning_rate": 1.7660550458715597e-05, "loss": 1.2218, "step": 385 },
    { "epoch": 0.08949059201468564, "grad_norm": 3.4005430609570766, "learning_rate": 1.7889908256880734e-05, "loss": 1.2104, "step": 390 },
    { "epoch": 0.0906379072969252, "grad_norm": 3.2242800996442633, "learning_rate": 1.811926605504587e-05, "loss": 1.2444, "step": 395 },
    { "epoch": 0.09178522257916476, "grad_norm": 3.465093348681784, "learning_rate": 1.834862385321101e-05, "loss": 1.2207, "step": 400 },
    { "epoch": 0.09293253786140432, "grad_norm": 3.273893281103885, "learning_rate": 1.8577981651376148e-05, "loss": 1.2811, "step": 405 },
    { "epoch": 0.09407985314364388, "grad_norm": 3.389197589070483, "learning_rate": 1.8807339449541285e-05, "loss": 1.2325, "step": 410 },
    { "epoch": 0.09522716842588343, "grad_norm": 3.289591416306655, "learning_rate": 1.9036697247706422e-05, "loss": 1.2516, "step": 415 },
    { "epoch": 0.09637448370812299, "grad_norm": 3.0860795978402225, "learning_rate": 1.9266055045871563e-05, "loss": 1.2475, "step": 420 },
    { "epoch": 0.09752179899036255, "grad_norm": 3.2671393904118475, "learning_rate": 1.94954128440367e-05, "loss": 1.2681, "step": 425 },
    { "epoch": 0.09866911427260211, "grad_norm": 3.2619543784914535, "learning_rate": 1.9724770642201837e-05, "loss": 1.2523, "step": 430 },
    { "epoch": 0.09981642955484167, "grad_norm": 3.302002070021214, "learning_rate": 1.9954128440366974e-05, "loss": 1.2393, "step": 435 },
    { "epoch": 0.10096374483708123, "grad_norm": 3.093855168671344, "learning_rate": 1.9999948669655127e-05, "loss": 1.2365, "step": 440 },
    { "epoch": 0.10211106011932079, "grad_norm": 3.123194323251484, "learning_rate": 1.9999740141032216e-05, "loss": 1.2305, "step": 445 },
    { "epoch": 0.10325837540156035, "grad_norm": 3.0544621364543856, "learning_rate": 1.999937120932709e-05, "loss": 1.2906, "step": 450 },
    { "epoch": 0.10440569068379991, "grad_norm": 3.0019401915038273, "learning_rate": 1.9998841880457682e-05, "loss": 1.2699, "step": 455 },
    { "epoch": 0.10555300596603946, "grad_norm": 3.2375777216613972, "learning_rate": 1.9998152162914807e-05, "loss": 1.2553, "step": 460 },
    { "epoch": 0.10670032124827902, "grad_norm": 3.2092571589249737, "learning_rate": 1.9997302067762044e-05, "loss": 1.2313, "step": 465 },
    { "epoch": 0.10784763653051858, "grad_norm": 3.017032153972236, "learning_rate": 1.9996291608635527e-05, "loss": 1.2775, "step": 470 },
    { "epoch": 0.10899495181275815, "grad_norm": 3.324102394140963, "learning_rate": 1.999512080174375e-05, "loss": 1.2837, "step": 475 },
    { "epoch": 0.1101422670949977, "grad_norm": 3.1004727688280744, "learning_rate": 1.9993789665867316e-05, "loss": 1.2867, "step": 480 },
    { "epoch": 0.11128958237723727, "grad_norm": 3.011852793277257, "learning_rate": 1.9992298222358603e-05, "loss": 1.258, "step": 485 },
    { "epoch": 0.11243689765947683, "grad_norm": 2.905138461740028, "learning_rate": 1.9990646495141445e-05, "loss": 1.2602, "step": 490 },
    { "epoch": 0.11358421294171639, "grad_norm": 2.9824902308051984, "learning_rate": 1.9988834510710747e-05, "loss": 1.2446, "step": 495 },
    { "epoch": 0.11473152822395594, "grad_norm": 3.122225637940991, "learning_rate": 1.998686229813205e-05, "loss": 1.2704, "step": 500 },
    { "epoch": 0.1158788435061955, "grad_norm": 3.116874664916445, "learning_rate": 1.9984729889041077e-05, "loss": 1.2559, "step": 505 },
    { "epoch": 0.11702615878843506, "grad_norm": 3.280595317938923, "learning_rate": 1.9982437317643218e-05, "loss": 1.2762, "step": 510 },
    { "epoch": 0.11817347407067462, "grad_norm": 3.013891926535922, "learning_rate": 1.9979984620712972e-05, "loss": 1.2655, "step": 515 },
    { "epoch": 0.11932078935291418, "grad_norm": 3.230616995695979, "learning_rate": 1.9977371837593382e-05, "loss": 1.2794, "step": 520 },
    { "epoch": 0.12046810463515374, "grad_norm": 2.985385004989635, "learning_rate": 1.9974599010195384e-05, "loss": 1.2688, "step": 525 },
    { "epoch": 0.1216154199173933, "grad_norm": 2.9459280723846955, "learning_rate": 1.997166618299714e-05, "loss": 1.2565, "step": 530 },
    { "epoch": 0.12276273519963286, "grad_norm": 3.327101250478011, "learning_rate": 1.9968573403043325e-05, "loss": 1.2386, "step": 535 },
    { "epoch": 0.12391005048187242, "grad_norm": 3.10050951594509, "learning_rate": 1.9965320719944366e-05, "loss": 1.2785, "step": 540 },
    { "epoch": 0.12505736576411197, "grad_norm": 3.2120944118445367, "learning_rate": 1.9961908185875662e-05, "loss": 1.2578, "step": 545 },
    { "epoch": 0.12620468104635155, "grad_norm": 2.9884869949235204, "learning_rate": 1.995833585557674e-05, "loss": 1.2694, "step": 550 },
    { "epoch": 0.1273519963285911, "grad_norm": 2.937215349500601, "learning_rate": 1.9954603786350353e-05, "loss": 1.2475, "step": 555 },
    { "epoch": 0.12849931161083067, "grad_norm": 4.433606116061364, "learning_rate": 1.9950712038061617e-05, "loss": 1.2883, "step": 560 },
    { "epoch": 0.12964662689307022, "grad_norm": 3.081051255447705, "learning_rate": 1.994666067313698e-05, "loss": 1.2919, "step": 565 },
    { "epoch": 0.13079394217530976, "grad_norm": 3.246318877481652, "learning_rate": 1.994244975656328e-05, "loss": 1.2913, "step": 570 },
    { "epoch": 0.13194125745754934, "grad_norm": 2.860708074989303, "learning_rate": 1.9938079355886674e-05, "loss": 1.2504, "step": 575 },
    { "epoch": 0.13308857273978889, "grad_norm": 3.2926223671952157, "learning_rate": 1.993354954121155e-05, "loss": 1.2938, "step": 580 },
    { "epoch": 0.13423588802202846, "grad_norm": 2.8372407984509076, "learning_rate": 1.992886038519943e-05, "loss": 1.266, "step": 585 },
    { "epoch": 0.135383203304268, "grad_norm": 3.0930378514658816, "learning_rate": 1.9924011963067765e-05, "loss": 1.2736, "step": 590 },
    { "epoch": 0.13653051858650758, "grad_norm": 3.046343540888883, "learning_rate": 1.9919004352588768e-05, "loss": 1.2603, "step": 595 },
    { "epoch": 0.13767783386874713, "grad_norm": 3.4947134554407038, "learning_rate": 1.9913837634088143e-05, "loss": 1.2806, "step": 600 },
    { "epoch": 0.1388251491509867, "grad_norm": 2.9164567885161863, "learning_rate": 1.99085118904438e-05, "loss": 1.2727, "step": 605 },
    { "epoch": 0.13997246443322625, "grad_norm": 3.2929601550445455, "learning_rate": 1.9903027207084525e-05, "loss": 1.283, "step": 610 },
    { "epoch": 0.1411197797154658, "grad_norm": 3.1329363090645104, "learning_rate": 1.989738367198862e-05, "loss": 1.2742, "step": 615 },
    { "epoch": 0.14226709499770537, "grad_norm": 3.1482798021615905, "learning_rate": 1.9891581375682472e-05, "loss": 1.271, "step": 620 },
    { "epoch": 0.14341441027994492, "grad_norm": 2.8188055853541694, "learning_rate": 1.9885620411239134e-05, "loss": 1.2436, "step": 625 },
    { "epoch": 0.1445617255621845, "grad_norm": 2.994182190028967, "learning_rate": 1.9879500874276788e-05, "loss": 1.256, "step": 630 },
    { "epoch": 0.14570904084442404, "grad_norm": 2.8840122615715407, "learning_rate": 1.9873222862957243e-05, "loss": 1.2759, "step": 635 },
    { "epoch": 0.14685635612666362, "grad_norm": 3.216834319956669, "learning_rate": 1.9866786477984357e-05, "loss": 1.2297, "step": 640 },
    { "epoch": 0.14800367140890316, "grad_norm": 2.9383340312733575, "learning_rate": 1.9860191822602415e-05, "loss": 1.25, "step": 645 },
    { "epoch": 0.14915098669114274, "grad_norm": 2.897291855134717, "learning_rate": 1.985343900259446e-05, "loss": 1.2537, "step": 650 },
    { "epoch": 0.1502983019733823, "grad_norm": 2.975239778063116, "learning_rate": 1.9846528126280632e-05, "loss": 1.2621, "step": 655 },
    { "epoch": 0.15144561725562183, "grad_norm": 2.978965429770825, "learning_rate": 1.983945930451639e-05, "loss": 1.2632, "step": 660 },
    { "epoch": 0.1525929325378614, "grad_norm": 2.834209330189236, "learning_rate": 1.9832232650690765e-05, "loss": 1.2555, "step": 665 },
    { "epoch": 0.15374024782010096, "grad_norm": 2.8560141240052497, "learning_rate": 1.982484828072452e-05, "loss": 1.2592, "step": 670 },
    { "epoch": 0.15488756310234053, "grad_norm": 2.8618117387415456, "learning_rate": 1.981730631306831e-05, "loss": 1.2417, "step": 675 },
    { "epoch": 0.15603487838458008, "grad_norm": 2.910525869963812, "learning_rate": 1.9809606868700755e-05, "loss": 1.2771, "step": 680 },
    { "epoch": 0.15718219366681965, "grad_norm": 2.6548556540071266, "learning_rate": 1.9801750071126536e-05, "loss": 1.2352, "step": 685 },
    { "epoch": 0.1583295089490592, "grad_norm": 2.867755148956131, "learning_rate": 1.9793736046374375e-05, "loss": 1.2651, "step": 690 },
    { "epoch": 0.15947682423129875, "grad_norm": 2.956110626708722, "learning_rate": 1.9785564922995042e-05, "loss": 1.261, "step": 695 },
    { "epoch": 0.16062413951353832, "grad_norm": 3.295040302897522, "learning_rate": 1.977723683205928e-05, "loss": 1.274, "step": 700 },
    { "epoch": 0.16177145479577787, "grad_norm": 2.936931272401806, "learning_rate": 1.9768751907155707e-05, "loss": 1.2776, "step": 705 },
    { "epoch": 0.16291877007801744, "grad_norm": 2.9114545457536845, "learning_rate": 1.9760110284388667e-05, "loss": 1.3049, "step": 710 },
    { "epoch": 0.164066085360257, "grad_norm": 2.646123204687912, "learning_rate": 1.9751312102376062e-05, "loss": 1.2781, "step": 715 },
    { "epoch": 0.16521340064249657, "grad_norm": 2.774463476806892, "learning_rate": 1.9742357502247104e-05, "loss": 1.2376, "step": 720 },
    { "epoch": 0.1663607159247361, "grad_norm": 3.092003565928542, "learning_rate": 1.9733246627640072e-05, "loss": 1.2956, "step": 725 },
    { "epoch": 0.1675080312069757, "grad_norm": 2.888582015827288, "learning_rate": 1.9723979624700004e-05, "loss": 1.2603, "step": 730 },
    { "epoch": 0.16865534648921524, "grad_norm": 2.8242754861502744, "learning_rate": 1.9714556642076347e-05, "loss": 1.2572, "step": 735 },
    { "epoch": 0.16980266177145478, "grad_norm": 2.8564441721897045, "learning_rate": 1.970497783092057e-05, "loss": 1.275, "step": 740 },
    { "epoch": 0.17094997705369436, "grad_norm": 4.737708384861383, "learning_rate": 1.969524334488375e-05, "loss": 1.2806, "step": 745 },
    { "epoch": 0.1720972923359339, "grad_norm": 2.7729048751649685, "learning_rate": 1.9685353340114104e-05, "loss": 1.2555, "step": 750 },
    { "epoch": 0.17324460761817348, "grad_norm": 3.7322733945617985, "learning_rate": 1.9675307975254478e-05, "loss": 1.2735, "step": 755 },
    { "epoch": 0.17439192290041303, "grad_norm": 6.994325743658938, "learning_rate": 1.9665107411439805e-05, "loss": 1.2726, "step": 760 },
    { "epoch": 0.1755392381826526, "grad_norm": 3.0013238213848012, "learning_rate": 1.965475181229453e-05, "loss": 1.2633, "step": 765 },
    { "epoch": 0.17668655346489215, "grad_norm": 2.743017825024535, "learning_rate": 1.9644241343929966e-05, "loss": 1.2657, "step": 770 },
    { "epoch": 0.17783386874713172, "grad_norm": 2.7294372056243126, "learning_rate": 1.963357617494165e-05, "loss": 1.254, "step": 775 },
    { "epoch": 0.17898118402937127, "grad_norm": 2.6639405368239806, "learning_rate": 1.9622756476406625e-05, "loss": 1.2555, "step": 780 },
    { "epoch": 0.18012849931161082, "grad_norm": 2.7727550754305086, "learning_rate": 1.9611782421880702e-05, "loss": 1.2726, "step": 785 },
    { "epoch": 0.1812758145938504, "grad_norm": 2.8402056215181304, "learning_rate": 1.9600654187395666e-05, "loss": 1.2892, "step": 790 },
    { "epoch": 0.18242312987608994, "grad_norm": 2.783195044162214, "learning_rate": 1.958937195145647e-05, "loss": 1.2551, "step": 795 },
    { "epoch": 0.18357044515832951, "grad_norm": 2.8916825221026303, "learning_rate": 1.9577935895038363e-05, "loss": 1.2669, "step": 800 },
    { "epoch": 0.18471776044056906, "grad_norm": 2.707687158217741, "learning_rate": 1.9566346201583974e-05, "loss": 1.2492, "step": 805 },
    { "epoch": 0.18586507572280864, "grad_norm": 3.158708488021887, "learning_rate": 1.9554603057000397e-05, "loss": 1.2637, "step": 810 },
    { "epoch": 0.18701239100504818, "grad_norm": 2.9465795983656182, "learning_rate": 1.954270664965618e-05, "loss": 1.2449, "step": 815 },
    { "epoch": 0.18815970628728776, "grad_norm": 2.7211490382547883, "learning_rate": 1.953065717037832e-05, "loss": 1.2544, "step": 820 },
    { "epoch": 0.1893070215695273, "grad_norm": 2.8949023742596314, "learning_rate": 1.951845481244921e-05, "loss": 1.2273, "step": 825 },
    { "epoch": 0.19045433685176685, "grad_norm": 2.765474296144506, "learning_rate": 1.9506099771603515e-05, "loss": 1.2493, "step": 830 },
    { "epoch": 0.19160165213400643, "grad_norm": 2.900346678087231, "learning_rate": 1.9493592246025047e-05, "loss": 1.2956, "step": 835 },
    { "epoch": 0.19274896741624598, "grad_norm": 2.845481992620935, "learning_rate": 1.9480932436343584e-05, "loss": 1.2694, "step": 840 },
    { "epoch": 0.19389628269848555, "grad_norm": 2.8255091786274047, "learning_rate": 1.9468120545631647e-05, "loss": 1.2453, "step": 845 },
    { "epoch": 0.1950435979807251, "grad_norm": 2.854290942764917, "learning_rate": 1.945515677940127e-05, "loss": 1.2611, "step": 850 },
    { "epoch": 0.19619091326296467, "grad_norm": 2.7613113548764083, "learning_rate": 1.944204134560064e-05, "loss": 1.2483, "step": 855 },
    { "epoch": 0.19733822854520422, "grad_norm": 2.9238914008425145, "learning_rate": 1.9428774454610845e-05, "loss": 1.2613, "step": 860 },
    { "epoch": 0.1984855438274438, "grad_norm": 2.9258869381539463, "learning_rate": 1.941535631924242e-05, "loss": 1.2924, "step": 865 },
    { "epoch": 0.19963285910968334, "grad_norm": 2.845716729573166, "learning_rate": 1.9401787154731993e-05, "loss": 1.2757, "step": 870 },
    { "epoch": 0.2007801743919229, "grad_norm": 2.7827244136621814, "learning_rate": 1.9388067178738807e-05, "loss": 1.2926, "step": 875 },
    { "epoch": 0.20192748967416246, "grad_norm": 2.8654232792004755, "learning_rate": 1.9374196611341212e-05, "loss": 1.2831, "step": 880 },
    { "epoch": 0.203074804956402, "grad_norm": 2.768678720904829, "learning_rate": 1.936017567503317e-05, "loss": 1.2646, "step": 885 },
    { "epoch": 0.20422212023864159, "grad_norm": 2.7336541760912083, "learning_rate": 1.934600459472067e-05, "loss": 1.2654, "step": 890 },
    { "epoch": 0.20536943552088113, "grad_norm": 2.877666820443206, "learning_rate": 1.933168359771811e-05, "loss": 1.2617, "step": 895 },
    { "epoch": 0.2065167508031207, "grad_norm": 2.77452485009731, "learning_rate": 1.931721291374467e-05, "loss": 1.2537, "step": 900 },
    { "epoch": 0.20766406608536025, "grad_norm": 2.759805605741722, "learning_rate": 1.9302592774920606e-05, "loss": 1.2469, "step": 905 },
    { "epoch": 0.20881138136759983, "grad_norm": 2.633186753874087, "learning_rate": 1.9287823415763552e-05, "loss": 1.2598, "step": 910 },
    { "epoch": 0.20995869664983938, "grad_norm": 2.762895084636893, "learning_rate": 1.9272905073184734e-05, "loss": 1.2776, "step": 915 },
    { "epoch": 0.21110601193207892, "grad_norm": 2.6741191476011523, "learning_rate": 1.9257837986485187e-05, "loss": 1.2636, "step": 920 },
    { "epoch": 0.2122533272143185, "grad_norm": 2.7328410543396293, "learning_rate": 1.92426223973519e-05, "loss": 1.2329, "step": 925 },
    { "epoch": 0.21340064249655805, "grad_norm": 2.758194666346516, "learning_rate": 1.922725854985396e-05, "loss": 1.2447, "step": 930 },
    { "epoch": 0.21454795777879762, "grad_norm": 2.9215747504377796, "learning_rate": 1.921174669043862e-05, "loss": 1.265, "step": 935 },
    { "epoch": 0.21569527306103717, "grad_norm": 2.6957052221176574, "learning_rate": 1.9196087067927348e-05, "loss": 1.273, "step": 940 },
    { "epoch": 0.21684258834327674, "grad_norm": 3.151322186534713, "learning_rate": 1.918027993351185e-05, "loss": 1.2494, "step": 945 },
    { "epoch": 0.2179899036255163, "grad_norm": 2.8468019061200005, "learning_rate": 1.916432554075002e-05, "loss": 1.2601, "step": 950 },
    { "epoch": 0.21913721890775584, "grad_norm": 2.8675388482251565, "learning_rate": 1.9148224145561876e-05, "loss": 1.2694, "step": 955 },
    { "epoch": 0.2202845341899954, "grad_norm": 2.742245998362719, "learning_rate": 1.913197600622549e-05, "loss": 1.208, "step": 960 },
    { "epoch": 0.22143184947223496, "grad_norm": 2.738604567476206, "learning_rate": 1.9115581383372782e-05, "loss": 1.2773, "step": 965 },
    { "epoch": 0.22257916475447453, "grad_norm": 2.8303874276680294, "learning_rate": 1.9099040539985395e-05, "loss": 1.2869, "step": 970 },
    { "epoch": 0.22372648003671408, "grad_norm": 2.6179532721860173, "learning_rate": 1.9082353741390453e-05, "loss": 1.2405, "step": 975 },
    { "epoch": 0.22487379531895366, "grad_norm": 2.6182090072869024, "learning_rate": 1.90655212552563e-05, "loss": 1.2459, "step": 980 },
    { "epoch": 0.2260211106011932, "grad_norm": 2.7666514211794278, "learning_rate": 1.904854335158822e-05, "loss": 1.2752, "step": 985 },
    { "epoch": 0.22716842588343278, "grad_norm": 2.818653838842991, "learning_rate": 1.9031420302724093e-05, "loss": 1.2815, "step": 990 },
    { "epoch": 0.22831574116567233, "grad_norm": 2.9114632131177367, "learning_rate": 1.901415238333005e-05, "loss": 1.2539, "step": 995 },
    { "epoch": 0.22946305644791187, "grad_norm": 2.733983237186035, "learning_rate": 1.8996739870396027e-05, "loss": 1.2919, "step": 1000 },
    { "epoch": 0.23061037173015145, "grad_norm": 2.61730939299338, "learning_rate": 1.897918304323136e-05, "loss": 1.2365, "step": 1005 },
    { "epoch": 0.231757687012391, "grad_norm": 2.7775678613752803, "learning_rate": 1.896148218346028e-05, "loss": 1.2835, "step": 1010 },
    { "epoch": 0.23290500229463057, "grad_norm": 2.6620434707223914, "learning_rate": 1.8943637575017428e-05, "loss": 1.2754, "step": 1015 },
    { "epoch": 0.23405231757687012, "grad_norm": 2.826697714775707, "learning_rate": 1.8925649504143244e-05, "loss": 1.2547, "step": 1020 },
    { "epoch": 0.2351996328591097, "grad_norm": 2.723804846994195, "learning_rate": 1.890751825937944e-05, "loss": 1.2622, "step": 1025 },
    { "epoch": 0.23634694814134924, "grad_norm": 2.59612274862863, "learning_rate": 1.888924413156432e-05, "loss": 1.2766, "step": 1030 },
    { "epoch": 0.23749426342358881, "grad_norm": 2.6494254565525774, "learning_rate": 1.8870827413828148e-05, "loss": 1.242, "step": 1035 },
    { "epoch": 0.23864157870582836, "grad_norm": 2.656174111008909, "learning_rate": 1.885226840158843e-05, "loss": 1.23, "step": 1040 },
    { "epoch": 0.2397888939880679, "grad_norm": 2.763662684655998, "learning_rate": 1.8833567392545177e-05, "loss": 1.2676, "step": 1045 },
    { "epoch": 0.24093620927030748, "grad_norm": 2.641174999509725, "learning_rate": 1.8814724686676133e-05, "loss": 1.2455, "step": 1050 },
    { "epoch": 0.24208352455254703, "grad_norm": 2.6616650480778152, "learning_rate": 1.879574058623196e-05, "loss": 1.2372, "step": 1055 },
    { "epoch": 0.2432308398347866, "grad_norm": 2.573070310310057, "learning_rate": 1.8776615395731398e-05, "loss": 1.2301, "step": 1060 },
    { "epoch": 0.24437815511702615, "grad_norm": 2.7789276102882505, "learning_rate": 1.875734942195637e-05, "loss": 1.256, "step": 1065 },
    { "epoch": 0.24552547039926573, "grad_norm": 2.5198281488260093, "learning_rate": 1.8737942973947062e-05, "loss": 1.2509, "step": 1070 },
    { "epoch": 0.24667278568150527, "grad_norm": 2.6757554347799504, "learning_rate": 1.8718396362996968e-05, "loss": 1.2686, "step": 1075 },
    { "epoch": 0.24782010096374485, "grad_norm": 2.7605588886847907, "learning_rate": 1.8698709902647903e-05, "loss": 1.2404, "step": 1080 },
    { "epoch": 0.2489674162459844, "grad_norm": 2.6067001677094797, "learning_rate": 1.8678883908684964e-05, "loss": 1.243, "step": 1085 },
    { "epoch": 0.25011473152822394, "grad_norm": 2.5935855463654676, "learning_rate": 1.865891869913147e-05, "loss": 1.2629, "step": 1090 },
    { "epoch": 0.2512620468104635, "grad_norm": 2.6846198334888047, "learning_rate": 1.863881459424386e-05, "loss": 1.2455, "step": 1095 },
    { "epoch": 0.2524093620927031, "grad_norm": 2.770064972387501, "learning_rate": 1.8618571916506548e-05, "loss": 1.2702, "step": 1100 },
    { "epoch": 0.25355667737494264, "grad_norm": 7.16414497208806, "learning_rate": 1.8598190990626764e-05, "loss": 1.2746, "step": 1105 },
    { "epoch": 0.2547039926571822, "grad_norm": 2.8981550261204982, "learning_rate": 1.8577672143529337e-05, "loss": 1.2653, "step": 1110 },
    { "epoch": 0.25585130793942173, "grad_norm": 2.8392180307250805, "learning_rate": 1.8557015704351453e-05, "loss": 1.28, "step": 1115 },
    { "epoch": 0.25699862322166134, "grad_norm": 2.8866536206176465, "learning_rate": 1.853622200443737e-05, "loss": 1.2475, "step": 1120 },
    { "epoch": 0.2581459385039009, "grad_norm": 2.7434811297459287, "learning_rate": 1.8515291377333114e-05, "loss": 1.2525, "step": 1125 },
    { "epoch": 0.25929325378614043, "grad_norm": 2.516325346750563, "learning_rate": 1.849422415878112e-05, "loss": 1.2477, "step": 1130 },
    { "epoch": 0.26044056906838, "grad_norm": 2.7285846616073455, "learning_rate": 1.8473020686714847e-05, "loss": 1.293, "step": 1135 },
    { "epoch": 0.2615878843506195, "grad_norm": 2.5584913324504517, "learning_rate": 1.8451681301253363e-05, "loss": 1.2314, "step": 1140 },
    { "epoch": 0.26273519963285913, "grad_norm": 2.833266497213628, "learning_rate": 1.8430206344695875e-05, "loss": 1.2366, "step": 1145 },
    { "epoch": 0.2638825149150987, "grad_norm": 2.6820864779644547, "learning_rate": 1.840859616151627e-05, "loss": 1.2479, "step": 1150 },
    { "epoch": 0.2650298301973382, "grad_norm": 2.6742387500940565, "learning_rate": 1.8386851098357538e-05, "loss": 1.2892, "step": 1155 },
    { "epoch": 0.26617714547957777, "grad_norm": 2.6037489307720936, "learning_rate": 1.8364971504026273e-05, "loss": 1.2493, "step": 1160 },
    { "epoch": 0.2673244607618174, "grad_norm": 2.722204223715423, "learning_rate": 1.834295772948703e-05, "loss": 1.2827, "step": 1165 },
    { "epoch": 0.2684717760440569, "grad_norm": 2.7859216758984555, "learning_rate": 1.8320810127856706e-05, "loss": 1.2338, "step": 1170 },
    { "epoch": 0.26961909132629647, "grad_norm": 2.6379505799749174, "learning_rate": 1.8298529054398896e-05, "loss": 1.2653, "step": 1175 },
    { "epoch": 0.270766406608536, "grad_norm": 2.70334833150815, "learning_rate": 1.827611486651817e-05, "loss": 1.2373, "step": 1180 },
    { "epoch": 0.27191372189077556, "grad_norm": 2.689144836189015, "learning_rate": 1.8253567923754353e-05, "loss": 1.2876, "step": 1185 },
    { "epoch": 0.27306103717301516, "grad_norm": 2.8506131594938497, "learning_rate": 1.8230888587776758e-05, "loss": 1.254, "step": 1190 },
    { "epoch": 0.2742083524552547, "grad_norm": 2.7967969326711333, "learning_rate": 1.8208077222378376e-05, "loss": 1.2268, "step": 1195 },
    { "epoch": 0.27535566773749426, "grad_norm": 2.8211137090561986, "learning_rate": 1.8185134193470043e-05, "loss": 1.3033, "step": 1200 },
    { "epoch": 0.2765029830197338, "grad_norm": 2.6789699271006118, "learning_rate": 1.8162059869074586e-05, "loss": 1.2396, "step": 1205 },
    { "epoch": 0.2776502983019734, "grad_norm": 2.669915646408006, "learning_rate": 1.8138854619320893e-05, "loss": 1.2382, "step": 1210 },
    { "epoch": 0.27879761358421296, "grad_norm": 2.8691949960246697, "learning_rate": 1.8115518816437997e-05, "loss": 1.2627, "step": 1215 },
    { "epoch": 0.2799449288664525, "grad_norm": 2.5198989709998605, "learning_rate": 1.8092052834749094e-05, "loss": 1.2421, "step": 1220 },
    { "epoch": 0.28109224414869205, "grad_norm": 2.623953735014251, "learning_rate": 1.8068457050665547e-05, "loss": 1.2411, "step": 1225 },
    { "epoch": 0.2822395594309316, "grad_norm": 2.701213051257084, "learning_rate": 1.804473184268084e-05, "loss": 1.2664, "step": 1230 },
    { "epoch": 0.2833868747131712, "grad_norm": 2.5467029866920954, "learning_rate": 1.8020877591364508e-05, "loss": 1.2607, "step": 1235 },
    { "epoch": 0.28453418999541075, "grad_norm": 2.5990206825217377, "learning_rate": 1.799689467935604e-05, "loss": 1.2528, "step": 1240 },
    { "epoch": 0.2856815052776503, "grad_norm": 2.7964994357271413, "learning_rate": 1.797278349135874e-05, "loss": 1.2496, "step": 1245 },
    { "epoch": 0.28682882055988984, "grad_norm": 2.6044762278441858, "learning_rate": 1.7948544414133534e-05, "loss": 1.2508, "step": 1250 },
    { "epoch": 0.28797613584212944, "grad_norm": 2.72468981644295, "learning_rate": 1.7924177836492802e-05, "loss": 1.2324, "step": 1255 },
    { "epoch": 0.289123451124369, "grad_norm": 2.596318179164043, "learning_rate": 1.7899684149294118e-05, "loss": 1.2392, "step": 1260 },
    { "epoch": 0.29027076640660854, "grad_norm": 2.681066970647215, "learning_rate": 1.7875063745433978e-05, "loss": 1.2719, "step": 1265 },
    { "epoch": 0.2914180816888481, "grad_norm": 2.7223877723737577, "learning_rate": 1.7850317019841514e-05, "loss": 1.2569, "step": 1270 },
    { "epoch": 0.29256539697108763, "grad_norm": 2.4605258800436474, "learning_rate": 1.7825444369472147e-05, "loss": 1.2495, "step": 1275 },
    { "epoch": 0.29371271225332723, "grad_norm": 2.645246095924411, "learning_rate": 1.7800446193301225e-05, "loss": 1.2494, "step": 1280 },
    { "epoch": 0.2948600275355668, "grad_norm": 2.686411807869381, "learning_rate": 1.7775322892317618e-05, "loss": 1.2565, "step": 1285 },
    { "epoch": 0.29600734281780633, "grad_norm": 2.5616644208417036, "learning_rate": 1.7750074869517285e-05, "loss": 1.2373, "step": 1290 },
    { "epoch": 0.2971546581000459, "grad_norm": 2.752699987412825, "learning_rate": 1.7724702529896824e-05, "loss": 1.2349, "step": 1295 },
    { "epoch": 0.2983019733822855, "grad_norm": 2.6292531808701103, "learning_rate": 1.7699206280446955e-05, "loss": 1.2284, "step": 1300 },
    { "epoch": 0.299449288664525, "grad_norm": 2.4894873410954657, "learning_rate": 1.767358653014601e-05, "loss": 1.2206, "step": 1305 },
    { "epoch": 0.3005966039467646, "grad_norm": 2.4189638669221507, "learning_rate": 1.7647843689953352e-05, "loss": 1.2321, "step": 1310 },
    { "epoch": 0.3017439192290041, "grad_norm": 2.569424733644508, "learning_rate": 1.762197817280281e-05, "loss": 1.2487, "step": 1315 },
    { "epoch": 0.30289123451124367, "grad_norm": 2.581452393958971, "learning_rate": 1.759599039359603e-05, "loss": 1.2311, "step": 1320 },
    { "epoch": 0.30403854979348327, "grad_norm": 2.4458262952088536, "learning_rate": 1.756988076919583e-05, "loss": 1.2615, "step": 1325 },
    { "epoch": 0.3051858650757228, "grad_norm": 2.511753933286628, "learning_rate": 1.754364971841952e-05, "loss": 1.2479, "step": 1330 },
    { "epoch": 0.30633318035796236, "grad_norm": 2.61085957686584, "learning_rate": 1.7517297662032174e-05, "loss": 1.2379, "step": 1335 },
    { "epoch": 0.3074804956402019, "grad_norm": 2.6407993774020206, "learning_rate": 1.749082502273988e-05, "loss": 1.2579, "step": 1340 },
    { "epoch": 0.3086278109224415, "grad_norm": 2.6120216096924977, "learning_rate": 1.746423222518297e-05, "loss": 1.2387, "step": 1345 },
    { "epoch": 0.30977512620468106, "grad_norm": 2.8982851648085504, "learning_rate": 1.7437519695929194e-05, "loss": 1.23, "step": 1350 },
    { "epoch": 0.3109224414869206, "grad_norm": 2.5580235398027793, "learning_rate": 1.741068786346689e-05, "loss": 1.2557, "step": 1355 },
    { "epoch": 0.31206975676916016, "grad_norm": 2.565411769595978, "learning_rate": 1.738373715819811e-05, "loss": 1.2392, "step": 1360 },
    { "epoch": 0.3132170720513997, "grad_norm": 2.617152710791485, "learning_rate": 1.7356668012431705e-05, "loss": 1.2352, "step": 1365 },
    { "epoch": 0.3143643873336393, "grad_norm": 2.503182025195152, "learning_rate": 1.7329480860376392e-05, "loss": 1.2393, "step": 1370 },
    { "epoch": 0.31551170261587885, "grad_norm": 2.504235729550804, "learning_rate": 1.7302176138133814e-05, "loss": 1.2367, "step": 1375 },
    { "epoch": 0.3166590178981184, "grad_norm": 2.589109156050675, "learning_rate": 1.7274754283691507e-05, "loss": 1.2437, "step": 1380 },
    { "epoch": 0.31780633318035795, "grad_norm": 2.5984716155591223, "learning_rate": 1.72472157369159e-05, "loss": 1.2332, "step": 1385 },
    { "epoch": 0.3189536484625975, "grad_norm": 2.5431122123105374, "learning_rate": 1.7219560939545246e-05, "loss": 1.2475, "step": 1390 },
    { "epoch": 0.3201009637448371, "grad_norm": 2.5803698318259682, "learning_rate": 1.719179033518255e-05, "loss": 1.2456, "step": 1395 },
    { "epoch": 0.32124827902707664, "grad_norm": 2.654846032452953, "learning_rate": 1.7163904369288443e-05, "loss": 1.2485, "step": 1400 },
    { "epoch": 0.3223955943093162, "grad_norm": 2.4536340664314955, "learning_rate": 1.7135903489174034e-05, "loss": 1.2536, "step": 1405 },
    { "epoch": 0.32354290959155574, "grad_norm": 2.5841724320770005, "learning_rate": 1.710778814399374e-05, "loss": 1.2392, "step": 1410 },
    { "epoch": 0.32469022487379534, "grad_norm": 2.527960263106006, "learning_rate": 1.7079558784738092e-05, "loss": 1.2336, "step": 1415 },
    { "epoch": 0.3258375401560349, "grad_norm": 2.470359172698634, "learning_rate": 1.705121586422647e-05, "loss": 1.2138, "step": 1420 },
    { "epoch": 0.32698485543827444, "grad_norm": 2.619939129309239, "learning_rate": 1.702275983709987e-05, "loss": 1.2346, "step": 1425 },
    { "epoch": 0.328132170720514, "grad_norm": 2.655269081012586, "learning_rate": 1.699419115981361e-05, "loss": 1.241, "step": 1430 },
    { "epoch": 0.32927948600275353, "grad_norm": 2.5162818627628796, "learning_rate": 1.6965510290629973e-05, "loss": 1.2222, "step": 1435 },
    { "epoch": 0.33042680128499313, "grad_norm": 2.5758937912713447, "learning_rate": 1.69367176896109e-05, "loss": 1.2419, "step": 1440 },
    { "epoch": 0.3315741165672327, "grad_norm": 2.4682531973490263, "learning_rate": 1.6907813818610597e-05, "loss": 1.2358, "step": 1445 },
    { "epoch": 0.3327214318494722, "grad_norm": 2.6517734913980466, "learning_rate": 1.6878799141268107e-05, "loss": 1.2399, "step": 1450 },
    { "epoch": 0.3338687471317118, "grad_norm": 2.5133914405781392, "learning_rate": 1.6849674122999878e-05, "loss": 1.2167, "step": 1455 },
    { "epoch": 0.3350160624139514, "grad_norm": 2.4861956286667852, "learning_rate": 1.682043923099234e-05, "loss": 1.2341, "step": 1460 },
    { "epoch": 0.3361633776961909, "grad_norm": 2.476043042749583, "learning_rate": 1.679109493419435e-05, "loss": 1.2068, "step": 1465 },
    { "epoch": 0.33731069297843047, "grad_norm": 2.45123241603087, "learning_rate": 1.6761641703309702e-05, "loss": 1.2214, "step": 1470 },
    { "epoch": 0.33845800826067, "grad_norm": 2.4841737308673912, "learning_rate": 1.673208001078958e-05, "loss": 1.2608, "step": 1475 },
    { "epoch": 0.33960532354290957, "grad_norm": 2.559450448851183, "learning_rate": 1.6702410330824962e-05, "loss": 1.2409, "step": 1480 },
    { "epoch": 0.34075263882514917, "grad_norm": 2.7859811126370433, "learning_rate": 1.6672633139339028e-05, "loss": 1.2102, "step": 1485 },
    { "epoch": 0.3418999541073887, "grad_norm": 2.57977983935283, "learning_rate": 1.6642748913979515e-05, "loss": 1.2228, "step": 1490 },
    { "epoch": 0.34304726938962826, "grad_norm": 2.533334211060345, "learning_rate": 1.6612758134111072e-05, "loss": 1.2459, "step": 1495 },
    { "epoch": 0.3441945846718678, "grad_norm": 2.502270363127228, "learning_rate": 1.6582661280807553e-05, "loss": 1.2251, "step": 1500 },
    { "epoch": 0.3453418999541074, "grad_norm": 2.6568753367931963, "learning_rate": 1.65524588368443e-05, "loss": 1.232, "step": 1505 },
    { "epoch": 0.34648921523634696, "grad_norm": 2.4840738066226775, "learning_rate": 1.652215128669042e-05, "loss": 1.2391, "step": 1510 },
    { "epoch": 0.3476365305185865, "grad_norm": 2.473433983671793, "learning_rate": 1.649173911650099e-05, "loss": 1.2104, "step": 1515 },
    { "epoch": 0.34878384580082605, "grad_norm": 2.5021088399168177, "learning_rate": 1.646122281410927e-05, "loss": 1.2164, "step": 1520 },
    { "epoch": 0.3499311610830656, "grad_norm": 2.4403773517332654, "learning_rate": 1.6430602869018867e-05, "loss": 1.2525, "step": 1525 },
    { "epoch": 0.3510784763653052, "grad_norm": 2.601036367621894, "learning_rate": 1.6399879772395915e-05, "loss": 1.2246, "step": 1530 },
    { "epoch": 0.35222579164754475, "grad_norm": 2.526793579521016, "learning_rate": 1.636905401706116e-05, "loss": 1.2336, "step": 1535 },
    { "epoch": 0.3533731069297843, "grad_norm": 2.4874250571730174, "learning_rate": 1.633812609748206e-05, "loss": 1.2335, "step": 1540 },
    { "epoch": 0.35452042221202384, "grad_norm": 2.4642069605568433, "learning_rate": 1.630709650976487e-05, "loss": 1.2266, "step": 1545 },
    { "epoch": 0.35566773749426345, "grad_norm": 2.941877356467088, "learning_rate": 1.6275965751646682e-05, "loss": 1.24, "step": 1550 },
    { "epoch": 0.356815052776503, "grad_norm": 2.906262225664714, "learning_rate": 1.6244734322487415e-05, "loss": 1.2295, "step": 1555 },
    { "epoch": 0.35796236805874254, "grad_norm": 2.501934363393508, "learning_rate": 1.6213402723261852e-05, "loss": 1.2005, "step": 1560 },
    { "epoch": 0.3591096833409821, "grad_norm": 2.6600548799060095, "learning_rate": 1.618197145655155e-05, "loss": 1.2381, "step": 1565 },
    { "epoch": 0.36025699862322164, "grad_norm": 2.443840455534142, "learning_rate": 1.6150441026536827e-05, "loss": 1.2189, "step": 1570 },
    { "epoch": 0.36140431390546124, "grad_norm": 2.5147950136244126, "learning_rate": 1.6118811938988632e-05, "loss": 1.2204, "step": 1575 },
    { "epoch": 0.3625516291877008, "grad_norm": 2.695659707022499, "learning_rate": 1.6087084701260468e-05, "loss": 1.2358, "step": 1580 },
    { "epoch": 0.36369894446994033, "grad_norm": 2.6213066605333943, "learning_rate": 1.605525982228023e-05, "loss": 1.2266, "step": 1585 },
    { "epoch": 0.3648462597521799, "grad_norm": 2.522457456754264, "learning_rate": 1.6023337812542048e-05, "loss": 1.2281, "step": 1590 },
    { "epoch": 0.3659935750344195, "grad_norm": 2.611040744359311, "learning_rate": 1.5991319184098107e-05, "loss": 1.2363, "step": 1595 },
    { "epoch": 0.36714089031665903, "grad_norm": 2.6055726423625605, "learning_rate": 1.5959204450550427e-05, "loss": 1.1846, "step": 1600 },
    { "epoch": 0.3682882055988986, "grad_norm": 2.7334871548994233, "learning_rate": 1.5926994127042615e-05, "loss": 1.2297, "step": 1605 },
    { "epoch": 0.3694355208811381, "grad_norm": 3.065228106081381, "learning_rate": 1.5894688730251613e-05, "loss": 1.2259, "step": 1610 },
    { "epoch": 0.37058283616337767, "grad_norm": 2.53275476586628, "learning_rate": 1.586228877837941e-05, "loss": 1.2223, "step": 1615 },
    { "epoch": 0.3717301514456173, "grad_norm": 2.6450492194453754, "learning_rate": 1.5829794791144723e-05, "loss": 1.2324, "step": 1620 },
    { "epoch": 0.3728774667278568, "grad_norm": 2.6505407805972925, "learning_rate": 1.5797207289774668e-05, "loss": 1.206, "step": 1625 },
    { "epoch": 0.37402478201009637, "grad_norm": 2.4512098548124084, "learning_rate": 1.57645267969964e-05, "loss": 1.2094, "step": 1630 },
    { "epoch": 0.3751720972923359, "grad_norm": 2.4974996247628565, "learning_rate": 1.5731753837028714e-05, "loss": 1.2384, "step": 1635 },
    { "epoch": 0.3763194125745755, "grad_norm": 2.5336333006362772, "learning_rate": 1.569888893557365e-05, "loss": 1.2179, "step": 1640 },
    { "epoch": 0.37746672785681507, "grad_norm": 2.480462659266847, "learning_rate": 1.5665932619808058e-05, "loss": 1.2137, "step": 1645 },
    { "epoch": 0.3786140431390546, "grad_norm": 2.5575394813805388, "learning_rate": 1.5632885418375136e-05, "loss": 1.2099, "step": 1650 },
    { "epoch": 0.37976135842129416, "grad_norm": 2.638731671620944, "learning_rate": 1.5599747861375957e-05, "loss": 1.2067, "step": 1655 },
    { "epoch": 0.3809086737035337, "grad_norm": 2.7046597751215704, "learning_rate": 1.556652048036096e-05, "loss": 1.2139, "step": 1660 },
    { "epoch": 0.3820559889857733, "grad_norm": 2.434498118282716, "learning_rate": 1.553320380832143e-05, "loss": 1.2299, "step": 1665 },
    { "epoch": 0.38320330426801286, "grad_norm": 2.5520040709852148, "learning_rate": 1.549979837968094e-05, "loss": 1.1987, "step": 1670 },
    { "epoch": 0.3843506195502524, "grad_norm": 2.4129325989146735, "learning_rate": 1.5466304730286795e-05, "loss": 1.2333, "step": 1675 },
    { "epoch": 0.38549793483249195, "grad_norm": 2.3734664430568286, "learning_rate": 1.5432723397401406e-05, "loss": 1.2118, "step": 1680 },
    { "epoch": 0.38664525011473155, "grad_norm": 2.489082024919344, "learning_rate": 1.5399054919693704e-05, "loss": 1.2249, "step": 1685 },
    { "epoch": 0.3877925653969711, "grad_norm": 2.632893560679943, "learning_rate": 1.5365299837230483e-05, "loss": 1.2447, "step": 1690 },
    { "epoch": 0.38893988067921065, "grad_norm": 2.502360122889922, "learning_rate": 1.5331458691467742e-05, "loss": 1.2039, "step": 1695 },
    { "epoch": 0.3900871959614502, "grad_norm": 2.56570123550657, "learning_rate": 1.5297532025241993e-05, "loss": 1.2, "step": 1700 },
    { "epoch": 0.39123451124368974, "grad_norm": 2.4982474943791706, "learning_rate": 1.5263520382761563e-05, "loss": 1.2182, "step": 1705 },
    { "epoch": 0.39238182652592934, "grad_norm": 2.5057101433951776, "learning_rate": 1.5229424309597853e-05, "loss": 1.2199, "step": 1710 },
    { "epoch": 0.3935291418081689, "grad_norm": 2.5257720554796403, "learning_rate": 1.5195244352676606e-05, "loss": 1.2072, "step": 1715 },
    { "epoch": 0.39467645709040844, "grad_norm": 2.5247477276803516, "learning_rate": 1.5160981060269107e-05, "loss": 1.2059, "step": 1720 },
    { "epoch": 0.395823772372648, "grad_norm": 2.3583351198103784, "learning_rate": 1.5126634981983412e-05, "loss": 1.215, "step": 1725 },
    { "epoch": 0.3969710876548876, "grad_norm": 2.459251655659526, "learning_rate": 1.5092206668755518e-05, "loss": 1.203, "step": 1730 },
    { "epoch": 0.39811840293712714, "grad_norm": 2.5334007418963753, "learning_rate": 1.5057696672840529e-05, "loss": 1.2473, "step": 1735 },
    { "epoch": 0.3992657182193667, "grad_norm": 2.3391171431662108, "learning_rate": 1.5023105547803807e-05, "loss": 1.2226, "step": 1740 },
    { "epoch": 0.40041303350160623, "grad_norm": 2.3984874089782573, "learning_rate": 1.4988433848512074e-05, "loss": 1.1848, "step": 1745 },
    { "epoch": 0.4015603487838458, "grad_norm": 2.3056101239425986, "learning_rate": 1.4953682131124527e-05, "loss": 1.2188, "step": 1750 },
    { "epoch": 0.4027076640660854, "grad_norm": 2.556190876138143, "learning_rate": 1.491885095308391e-05, "loss": 1.1984, "step": 1755 },
    { "epoch": 0.4038549793483249, "grad_norm": 2.4518910879213895, "learning_rate": 1.4883940873107572e-05, "loss": 1.2238, "step": 1760 },
    { "epoch": 0.4050022946305645, "grad_norm": 2.473122618546395, "learning_rate": 1.4848952451178508e-05, "loss": 1.207, "step": 1765 },
    { "epoch": 0.406149609912804, "grad_norm": 2.359534310542907, "learning_rate": 1.4813886248536376e-05, "loss": 1.1977, "step": 1770 },
    { "epoch": 0.4072969251950436, "grad_norm": 2.477818274819773, "learning_rate": 1.4778742827668484e-05, "loss": 1.2391, "step": 1775 },
    { "epoch": 0.40844424047728317, "grad_norm": 2.530388893111552, "learning_rate": 1.4743522752300793e-05, "loss": 1.2163, "step": 1780 },
    { "epoch": 0.4095915557595227, "grad_norm": 2.5519663146982605, "learning_rate": 1.4708226587388845e-05, "loss": 1.2532, "step": 1785 },
    { "epoch": 0.41073887104176227, "grad_norm": 2.394616840944208, "learning_rate": 1.467285489910872e-05, "loss": 1.2155, "step": 1790 },
    { "epoch": 0.4118861863240018, "grad_norm": 2.412230711815024, "learning_rate": 1.4637408254847936e-05, "loss": 1.1907, "step": 1795 },
    { "epoch": 0.4130335016062414, "grad_norm": 2.5078430973460817, "learning_rate": 1.4601887223196374e-05, "loss": 1.1998, "step": 1800 },
    { "epoch": 0.41418081688848096, "grad_norm": 2.4576469102426373, "learning_rate": 1.4566292373937133e-05, "loss": 1.1996, "step": 1805 },
    { "epoch": 0.4153281321707205, "grad_norm": 2.7831279786324137, "learning_rate": 1.4530624278037406e-05, "loss": 1.2164, "step": 1810 },
    { "epoch": 0.41647544745296006, "grad_norm": 2.478975911392516, "learning_rate": 1.449488350763931e-05, "loss": 1.2199, "step": 1815 },
    { "epoch": 0.41762276273519966, "grad_norm": 2.446888200491552, "learning_rate": 1.4459070636050721e-05, "loss": 1.1929, "step": 1820 },
    { "epoch": 0.4187700780174392, "grad_norm": 2.456196149028385, "learning_rate": 1.4423186237736063e-05, "loss": 1.2198, "step": 1825 },
    { "epoch": 0.41991739329967875, "grad_norm": 2.4671601504916296, "learning_rate": 1.4387230888307098e-05, "loss": 1.2093, "step": 1830 },
{ |
|
"epoch": 0.4210647085819183, |
|
"grad_norm": 2.469445463788116, |
|
"learning_rate": 1.4351205164513708e-05, |
|
"loss": 1.2115, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.42221202386415785, |
|
"grad_norm": 2.412944791501572, |
|
"learning_rate": 1.4315109644234619e-05, |
|
"loss": 1.1899, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.42335933914639745, |
|
"grad_norm": 2.537897011913316, |
|
"learning_rate": 1.427894490646815e-05, |
|
"loss": 1.1931, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.424506654428637, |
|
"grad_norm": 2.4797926920153275, |
|
"learning_rate": 1.4242711531322912e-05, |
|
"loss": 1.2071, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.42565396971087655, |
|
"grad_norm": 2.4620300685296095, |
|
"learning_rate": 1.420641010000852e-05, |
|
"loss": 1.1901, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.4268012849931161, |
|
"grad_norm": 2.5654139268387905, |
|
"learning_rate": 1.4170041194826247e-05, |
|
"loss": 1.1672, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.4279486002753557, |
|
"grad_norm": 2.349473278321755, |
|
"learning_rate": 1.4133605399159706e-05, |
|
"loss": 1.1863, |
|
"step": 1865 |
|
}, |
|
{
"epoch": 0.42909591555759524,
"grad_norm": 2.4170921121837603,
"learning_rate": 1.4097103297465471e-05,
"loss": 1.1997,
"step": 1870
},
{
"epoch": 0.4302432308398348,
"grad_norm": 2.5029515745014277,
"learning_rate": 1.4060535475263725e-05,
"loss": 1.2134,
"step": 1875
},
{
"epoch": 0.43139054612207434,
"grad_norm": 2.3135883184156136,
"learning_rate": 1.402390251912885e-05,
"loss": 1.1844,
"step": 1880
},
{
"epoch": 0.4325378614043139,
"grad_norm": 2.404470246112949,
"learning_rate": 1.398720501668002e-05,
"loss": 1.202,
"step": 1885
},
{
"epoch": 0.4336851766865535,
"grad_norm": 2.5569577276828035,
"learning_rate": 1.395044355657178e-05,
"loss": 1.2208,
"step": 1890
},
{
"epoch": 0.43483249196879303,
"grad_norm": 2.3371018844193108,
"learning_rate": 1.391361872848461e-05,
"loss": 1.1737,
"step": 1895
},
{
"epoch": 0.4359798072510326,
"grad_norm": 2.660539570330875,
"learning_rate": 1.387673112311545e-05,
"loss": 1.1778,
"step": 1900
},
{
"epoch": 0.4371271225332721,
"grad_norm": 2.3038070568716793,
"learning_rate": 1.3839781332168236e-05,
"loss": 1.1921,
"step": 1905
},
{
"epoch": 0.4382744378155117,
"grad_norm": 2.4670441595509995,
"learning_rate": 1.3802769948344406e-05,
"loss": 1.1833,
"step": 1910
},
{
"epoch": 0.4394217530977513,
"grad_norm": 2.4330880664690864,
"learning_rate": 1.3765697565333387e-05,
"loss": 1.1835,
"step": 1915
},
{
"epoch": 0.4405690683799908,
"grad_norm": 2.5476719771012886,
"learning_rate": 1.3728564777803089e-05,
"loss": 1.1809,
"step": 1920
},
{
"epoch": 0.44171638366223037,
"grad_norm": 2.4245419288540906,
"learning_rate": 1.369137218139034e-05,
"loss": 1.216,
"step": 1925
},
{
"epoch": 0.4428636989444699,
"grad_norm": 2.48980878325673,
"learning_rate": 1.3654120372691361e-05,
"loss": 1.2043,
"step": 1930
},
{
"epoch": 0.4440110142267095,
"grad_norm": 2.309423158584073,
"learning_rate": 1.3616809949252168e-05,
"loss": 1.2084,
"step": 1935
},
{
"epoch": 0.44515832950894907,
"grad_norm": 2.4292416714345526,
"learning_rate": 1.3579441509559007e-05,
"loss": 1.2251,
"step": 1940
},
{
"epoch": 0.4463056447911886,
"grad_norm": 2.5052554766414996,
"learning_rate": 1.3542015653028742e-05,
"loss": 1.2122,
"step": 1945
},
{
"epoch": 0.44745296007342816,
"grad_norm": 2.5664797047962433,
"learning_rate": 1.350453297999925e-05,
"loss": 1.2101,
"step": 1950
},
{
"epoch": 0.4486002753556677,
"grad_norm": 2.5032873925292973,
"learning_rate": 1.3466994091719782e-05,
"loss": 1.2125,
"step": 1955
},
{
"epoch": 0.4497475906379073,
"grad_norm": 2.4259701176858126,
"learning_rate": 1.3429399590341325e-05,
"loss": 1.1872,
"step": 1960
},
{
"epoch": 0.45089490592014686,
"grad_norm": 2.4344258356766844,
"learning_rate": 1.3391750078906939e-05,
"loss": 1.1538,
"step": 1965
},
{
"epoch": 0.4520422212023864,
"grad_norm": 2.473246644054591,
"learning_rate": 1.3354046161342087e-05,
"loss": 1.1941,
"step": 1970
},
{
"epoch": 0.45318953648462595,
"grad_norm": 2.4762521196200233,
"learning_rate": 1.3316288442444943e-05,
"loss": 1.2287,
"step": 1975
},
{
"epoch": 0.45433685176686556,
"grad_norm": 2.5747496155427925,
"learning_rate": 1.327847752787669e-05,
"loss": 1.1946,
"step": 1980
},
{
"epoch": 0.4554841670491051,
"grad_norm": 2.4957625591645396,
"learning_rate": 1.324061402415182e-05,
"loss": 1.1943,
"step": 1985
},
{
"epoch": 0.45663148233134465,
"grad_norm": 2.4803686926028354,
"learning_rate": 1.3202698538628376e-05,
"loss": 1.1958,
"step": 1990
},
{
"epoch": 0.4577787976135842,
"grad_norm": 2.515928887088066,
"learning_rate": 1.3164731679498249e-05,
"loss": 1.1914,
"step": 1995
},
{
"epoch": 0.45892611289582375,
"grad_norm": 2.40416499995082,
"learning_rate": 1.3126714055777378e-05,
"loss": 1.2007,
"step": 2000
},
{
"epoch": 0.46007342817806335,
"grad_norm": 2.4071385033932726,
"learning_rate": 1.3088646277296018e-05,
"loss": 1.205,
"step": 2005
},
{
"epoch": 0.4612207434603029,
"grad_norm": 2.3336952244417857,
"learning_rate": 1.3050528954688932e-05,
"loss": 1.1807,
"step": 2010
},
{
"epoch": 0.46236805874254244,
"grad_norm": 2.570415975161917,
"learning_rate": 1.3012362699385616e-05,
"loss": 1.1677,
"step": 2015
},
{
"epoch": 0.463515374024782,
"grad_norm": 2.4434180063843396,
"learning_rate": 1.2974148123600477e-05,
"loss": 1.1938,
"step": 2020
},
{
"epoch": 0.4646626893070216,
"grad_norm": 2.388368907016271,
"learning_rate": 1.2935885840323015e-05,
"loss": 1.1938,
"step": 2025
},
{
"epoch": 0.46581000458926114,
"grad_norm": 2.4093448614688953,
"learning_rate": 1.2897576463307999e-05,
"loss": 1.1814,
"step": 2030
},
{
"epoch": 0.4669573198715007,
"grad_norm": 2.5376955340059646,
"learning_rate": 1.285922060706561e-05,
"loss": 1.1668,
"step": 2035
},
{
"epoch": 0.46810463515374023,
"grad_norm": 2.417127573905711,
"learning_rate": 1.2820818886851599e-05,
"loss": 1.1821,
"step": 2040
},
{
"epoch": 0.4692519504359798,
"grad_norm": 2.298184877749985,
"learning_rate": 1.2782371918657393e-05,
"loss": 1.1954,
"step": 2045
},
{
"epoch": 0.4703992657182194,
"grad_norm": 2.417757124526873,
"learning_rate": 1.2743880319200241e-05,
"loss": 1.166,
"step": 2050
},
{
"epoch": 0.47154658100045893,
"grad_norm": 2.371659919793289,
"learning_rate": 1.270534470591331e-05,
"loss": 1.1936,
"step": 2055
},
{
"epoch": 0.4726938962826985,
"grad_norm": 2.4662501454162062,
"learning_rate": 1.2666765696935773e-05,
"loss": 1.2021,
"step": 2060
},
{
"epoch": 0.473841211564938,
"grad_norm": 2.4729202869839115,
"learning_rate": 1.2628143911102905e-05,
"loss": 1.1952,
"step": 2065
},
{
"epoch": 0.47498852684717763,
"grad_norm": 2.3449678946057113,
"learning_rate": 1.2589479967936163e-05,
"loss": 1.1959,
"step": 2070
},
{
"epoch": 0.4761358421294172,
"grad_norm": 2.439109075077548,
"learning_rate": 1.2550774487633218e-05,
"loss": 1.1864,
"step": 2075
},
{
"epoch": 0.4772831574116567,
"grad_norm": 2.529102082715549,
"learning_rate": 1.2512028091058044e-05,
"loss": 1.186,
"step": 2080
},
{
"epoch": 0.47843047269389627,
"grad_norm": 2.376604159825014,
"learning_rate": 1.2473241399730931e-05,
"loss": 1.1701,
"step": 2085
},
{
"epoch": 0.4795777879761358,
"grad_norm": 2.4169774686564263,
"learning_rate": 1.2434415035818535e-05,
"loss": 1.2191,
"step": 2090
},
{
"epoch": 0.4807251032583754,
"grad_norm": 2.532981770965977,
"learning_rate": 1.239554962212388e-05,
"loss": 1.2286,
"step": 2095
},
{
"epoch": 0.48187241854061497,
"grad_norm": 2.378557479987113,
"learning_rate": 1.2356645782076384e-05,
"loss": 1.1677,
"step": 2100
},
{
"epoch": 0.4830197338228545,
"grad_norm": 2.709648710575487,
"learning_rate": 1.2317704139721847e-05,
"loss": 1.1751,
"step": 2105
},
{
"epoch": 0.48416704910509406,
"grad_norm": 2.25659549412137,
"learning_rate": 1.2278725319712449e-05,
"loss": 1.1706,
"step": 2110
},
{
"epoch": 0.48531436438733366,
"grad_norm": 2.410190860485096,
"learning_rate": 1.2239709947296722e-05,
"loss": 1.1681,
"step": 2115
},
{
"epoch": 0.4864616796695732,
"grad_norm": 2.5109950886921233,
"learning_rate": 1.2200658648309531e-05,
"loss": 1.1655,
"step": 2120
},
{
"epoch": 0.48760899495181276,
"grad_norm": 2.4008584174966243,
"learning_rate": 1.2161572049162027e-05,
"loss": 1.1665,
"step": 2125
},
{
"epoch": 0.4887563102340523,
"grad_norm": 2.3827246030282194,
"learning_rate": 1.2122450776831593e-05,
"loss": 1.1662,
"step": 2130
},
{
"epoch": 0.48990362551629185,
"grad_norm": 2.424648067070357,
"learning_rate": 1.208329545885181e-05,
"loss": 1.2112,
"step": 2135
},
{
"epoch": 0.49105094079853145,
"grad_norm": 2.412168453903357,
"learning_rate": 1.2044106723302364e-05,
"loss": 1.18,
"step": 2140
},
{
"epoch": 0.492198256080771,
"grad_norm": 2.5463621491353865,
"learning_rate": 1.200488519879899e-05,
"loss": 1.1426,
"step": 2145
},
{
"epoch": 0.49334557136301055,
"grad_norm": 3.2345084391862216,
"learning_rate": 1.1965631514483376e-05,
"loss": 1.1739,
"step": 2150
},
{
"epoch": 0.4944928866452501,
"grad_norm": 2.3242325394764176,
"learning_rate": 1.1926346300013078e-05,
"loss": 1.1795,
"step": 2155
},
{
"epoch": 0.4956402019274897,
"grad_norm": 2.4343577481110334,
"learning_rate": 1.1887030185551427e-05,
"loss": 1.1751,
"step": 2160
},
{
"epoch": 0.49678751720972925,
"grad_norm": 2.536903854244761,
"learning_rate": 1.18476838017574e-05,
"loss": 1.1886,
"step": 2165
},
{
"epoch": 0.4979348324919688,
"grad_norm": 2.518861365226075,
"learning_rate": 1.1808307779775518e-05,
"loss": 1.1822,
"step": 2170
},
{
"epoch": 0.49908214777420834,
"grad_norm": 2.33985336402769,
"learning_rate": 1.176890275122573e-05,
"loss": 1.1852,
"step": 2175
},
{
"epoch": 0.5002294630564479,
"grad_norm": 2.695318454608909,
"learning_rate": 1.1729469348193263e-05,
"loss": 1.164,
"step": 2180
},
{
"epoch": 0.5013767783386874,
"grad_norm": 2.4114946936382835,
"learning_rate": 1.1690008203218493e-05,
"loss": 1.1793,
"step": 2185
},
{
"epoch": 0.502524093620927,
"grad_norm": 2.414381546692464,
"learning_rate": 1.1650519949286797e-05,
"loss": 1.1716,
"step": 2190
},
{
"epoch": 0.5036714089031666,
"grad_norm": 2.3096954634729525,
"learning_rate": 1.1611005219818392e-05,
"loss": 1.1771,
"step": 2195
},
{
"epoch": 0.5048187241854062,
"grad_norm": 2.377639229729014,
"learning_rate": 1.1571464648658201e-05,
"loss": 1.1594,
"step": 2200
},
{
"epoch": 0.5059660394676457,
"grad_norm": 2.413947191443181,
"learning_rate": 1.1531898870065645e-05,
"loss": 1.1377,
"step": 2205
},
{
"epoch": 0.5071133547498853,
"grad_norm": 2.413078162601223,
"learning_rate": 1.1492308518704507e-05,
"loss": 1.1643,
"step": 2210
},
{
"epoch": 0.5082606700321248,
"grad_norm": 2.3703796014060083,
"learning_rate": 1.145269422963272e-05,
"loss": 1.1672,
"step": 2215
},
{
"epoch": 0.5094079853143644,
"grad_norm": 2.52185413660041,
"learning_rate": 1.1413056638292215e-05,
"loss": 1.1482,
"step": 2220
},
{
"epoch": 0.5105553005966039,
"grad_norm": 2.5494899458552704,
"learning_rate": 1.1373396380498683e-05,
"loss": 1.1855,
"step": 2225
},
{
"epoch": 0.5117026158788435,
"grad_norm": 2.4759934453822687,
"learning_rate": 1.1333714092431423e-05,
"loss": 1.183,
"step": 2230
},
{
"epoch": 0.512849931161083,
"grad_norm": 2.739893863248377,
"learning_rate": 1.1294010410623107e-05,
"loss": 1.1619,
"step": 2235
},
{
"epoch": 0.5139972464433227,
"grad_norm": 2.3307913465594736,
"learning_rate": 1.1254285971949574e-05,
"loss": 1.1752,
"step": 2240
},
{
"epoch": 0.5151445617255622,
"grad_norm": 2.407069188869029,
"learning_rate": 1.1214541413619628e-05,
"loss": 1.1682,
"step": 2245
},
{
"epoch": 0.5162918770078018,
"grad_norm": 2.38595550036874,
"learning_rate": 1.1174777373164797e-05,
"loss": 1.1804,
"step": 2250
},
{
"epoch": 0.5174391922900413,
"grad_norm": 2.3691735593741865,
"learning_rate": 1.1134994488429128e-05,
"loss": 1.1982,
"step": 2255
},
{
"epoch": 0.5185865075722809,
"grad_norm": 2.3715490304848696,
"learning_rate": 1.109519339755893e-05,
"loss": 1.18,
"step": 2260
},
{
"epoch": 0.5197338228545204,
"grad_norm": 2.452149269203383,
"learning_rate": 1.1055374738992561e-05,
"loss": 1.1726,
"step": 2265
},
{
"epoch": 0.52088113813676,
"grad_norm": 2.3884283160593136,
"learning_rate": 1.1015539151450172e-05,
"loss": 1.1706,
"step": 2270
},
{
"epoch": 0.5220284534189995,
"grad_norm": 2.3927663987307253,
"learning_rate": 1.0975687273923474e-05,
"loss": 1.1737,
"step": 2275
},
{
"epoch": 0.523175768701239,
"grad_norm": 2.480726870506535,
"learning_rate": 1.0935819745665477e-05,
"loss": 1.177,
"step": 2280
},
{
"epoch": 0.5243230839834787,
"grad_norm": 2.6254719429941,
"learning_rate": 1.0895937206180243e-05,
"loss": 1.2074,
"step": 2285
},
{
"epoch": 0.5254703992657183,
"grad_norm": 2.4621930223006436,
"learning_rate": 1.0856040295212614e-05,
"loss": 1.1801,
"step": 2290
},
{
"epoch": 0.5266177145479578,
"grad_norm": 2.411669222534401,
"learning_rate": 1.0816129652737976e-05,
"loss": 1.1549,
"step": 2295
},
{
"epoch": 0.5277650298301974,
"grad_norm": 2.3766352222254876,
"learning_rate": 1.077620591895197e-05,
"loss": 1.1882,
"step": 2300
},
{
"epoch": 0.5289123451124369,
"grad_norm": 2.387743064914442,
"learning_rate": 1.0736269734260232e-05,
"loss": 1.1474,
"step": 2305
},
{
"epoch": 0.5300596603946764,
"grad_norm": 2.336284695991881,
"learning_rate": 1.069632173926812e-05,
"loss": 1.145,
"step": 2310
},
{
"epoch": 0.531206975676916,
"grad_norm": 2.234183118079691,
"learning_rate": 1.0656362574770442e-05,
"loss": 1.1274,
"step": 2315
},
{
"epoch": 0.5323542909591555,
"grad_norm": 2.494121033628584,
"learning_rate": 1.0616392881741166e-05,
"loss": 1.1803,
"step": 2320
},
{
"epoch": 0.5335016062413951,
"grad_norm": 2.4656108820538294,
"learning_rate": 1.0576413301323148e-05,
"loss": 1.1594,
"step": 2325
},
{
"epoch": 0.5346489215236347,
"grad_norm": 2.3529378211004017,
"learning_rate": 1.0536424474817848e-05,
"loss": 1.1669,
"step": 2330
},
{
"epoch": 0.5357962368058743,
"grad_norm": 2.447756807412706,
"learning_rate": 1.0496427043675032e-05,
"loss": 1.1577,
"step": 2335
},
{
"epoch": 0.5369435520881138,
"grad_norm": 2.4126468040306768,
"learning_rate": 1.0456421649482502e-05,
"loss": 1.1351,
"step": 2340
},
{
"epoch": 0.5380908673703534,
"grad_norm": 2.582646994016727,
"learning_rate": 1.041640893395578e-05,
"loss": 1.1625,
"step": 2345
},
{
"epoch": 0.5392381826525929,
"grad_norm": 2.424869345038524,
"learning_rate": 1.0376389538927841e-05,
"loss": 1.1444,
"step": 2350
},
{
"epoch": 0.5403854979348325,
"grad_norm": 2.3759407482791186,
"learning_rate": 1.0336364106338793e-05,
"loss": 1.1473,
"step": 2355
},
{
"epoch": 0.541532813217072,
"grad_norm": 2.3954926124121982,
"learning_rate": 1.0296333278225599e-05,
"loss": 1.15,
"step": 2360
},
{
"epoch": 0.5426801284993116,
"grad_norm": 2.3048166671918517,
"learning_rate": 1.0256297696711764e-05,
"loss": 1.1688,
"step": 2365
},
{
"epoch": 0.5438274437815511,
"grad_norm": 2.455044790822921,
"learning_rate": 1.0216258003997044e-05,
"loss": 1.1499,
"step": 2370
},
{
"epoch": 0.5449747590637908,
"grad_norm": 2.440410779702323,
"learning_rate": 1.0176214842347143e-05,
"loss": 1.1883,
"step": 2375
},
{
"epoch": 0.5461220743460303,
"grad_norm": 2.469069553035829,
"learning_rate": 1.0136168854083401e-05,
"loss": 1.1564,
"step": 2380
},
{
"epoch": 0.5472693896282699,
"grad_norm": 2.36749378237334,
"learning_rate": 1.0096120681572513e-05,
"loss": 1.1581,
"step": 2385
},
{
"epoch": 0.5484167049105094,
"grad_norm": 2.4927612686073446,
"learning_rate": 1.0056070967216199e-05,
"loss": 1.1604,
"step": 2390
},
{
"epoch": 0.549564020192749,
"grad_norm": 2.282784095648045,
"learning_rate": 1.0016020353440916e-05,
"loss": 1.1495,
"step": 2395
},
{
"epoch": 0.5507113354749885,
"grad_norm": 2.319998209280646,
"learning_rate": 9.975969482687547e-06,
"loss": 1.1709,
"step": 2400
},
{
"epoch": 0.5518586507572281,
"grad_norm": 2.4648551075271348,
"learning_rate": 9.935918997401104e-06,
"loss": 1.2095,
"step": 2405
},
{
"epoch": 0.5530059660394676,
"grad_norm": 2.401099714183983,
"learning_rate": 9.8958695400204e-06,
"loss": 1.1616,
"step": 2410
},
{
"epoch": 0.5541532813217072,
"grad_norm": 2.427714048915411,
"learning_rate": 9.855821752967779e-06,
"loss": 1.1622,
"step": 2415
},
{
"epoch": 0.5553005966039468,
"grad_norm": 2.3990234519386924,
"learning_rate": 9.815776278638772e-06,
"loss": 1.1666,
"step": 2420
},
{
"epoch": 0.5564479118861864,
"grad_norm": 2.5621471996893592,
"learning_rate": 9.775733759391833e-06,
"loss": 1.1379,
"step": 2425
},
{
"epoch": 0.5575952271684259,
"grad_norm": 2.343484566189912,
"learning_rate": 9.735694837537993e-06,
"loss": 1.1399,
"step": 2430
},
{
"epoch": 0.5587425424506655,
"grad_norm": 2.612209603264774,
"learning_rate": 9.695660155330598e-06,
"loss": 1.165,
"step": 2435
},
{
"epoch": 0.559889857732905,
"grad_norm": 2.3356646935305254,
"learning_rate": 9.655630354954974e-06,
"loss": 1.1964,
"step": 2440
},
{
"epoch": 0.5610371730151446,
"grad_norm": 2.5695540299579402,
"learning_rate": 9.615606078518143e-06,
"loss": 1.159,
"step": 2445
},
{
"epoch": 0.5621844882973841,
"grad_norm": 2.3599515222260705,
"learning_rate": 9.57558796803852e-06,
"loss": 1.1316,
"step": 2450
},
{
"epoch": 0.5633318035796236,
"grad_norm": 2.2759018372704802,
"learning_rate": 9.535576665435606e-06,
"loss": 1.1426,
"step": 2455
},
{
"epoch": 0.5644791188618632,
"grad_norm": 2.483770091141199,
"learning_rate": 9.495572812519718e-06,
"loss": 1.1813,
"step": 2460
},
{
"epoch": 0.5656264341441029,
"grad_norm": 2.2856329803598565,
"learning_rate": 9.455577050981648e-06,
"loss": 1.1421,
"step": 2465
},
{
"epoch": 0.5667737494263424,
"grad_norm": 2.3740281936243446,
"learning_rate": 9.41559002238242e-06,
"loss": 1.1607,
"step": 2470
},
{
"epoch": 0.567921064708582,
"grad_norm": 2.3093025719761475,
"learning_rate": 9.375612368142962e-06,
"loss": 1.1422,
"step": 2475
},
{
"epoch": 0.5690683799908215,
"grad_norm": 2.360053549045114,
"learning_rate": 9.33564472953383e-06,
"loss": 1.1494,
"step": 2480
},
{
"epoch": 0.570215695273061,
"grad_norm": 2.521839899431946,
"learning_rate": 9.295687747664935e-06,
"loss": 1.1842,
"step": 2485
},
{
"epoch": 0.5713630105553006,
"grad_norm": 2.3908456398757916,
"learning_rate": 9.255742063475228e-06,
"loss": 1.1461,
"step": 2490
},
{
"epoch": 0.5725103258375401,
"grad_norm": 2.3888342198222663,
"learning_rate": 9.215808317722453e-06,
"loss": 1.1216,
"step": 2495
},
{
"epoch": 0.5736576411197797,
"grad_norm": 2.1979418755669724,
"learning_rate": 9.175887150972841e-06,
"loss": 1.1301,
"step": 2500
},
{
"epoch": 0.5748049564020192,
"grad_norm": 2.3114444692539076,
"learning_rate": 9.135979203590852e-06,
"loss": 1.1469,
"step": 2505
},
{
"epoch": 0.5759522716842589,
"grad_norm": 2.410614961263144,
"learning_rate": 9.096085115728902e-06,
"loss": 1.1377,
"step": 2510
},
{
"epoch": 0.5770995869664984,
"grad_norm": 2.4404399337721054,
"learning_rate": 9.056205527317082e-06,
"loss": 1.1678,
"step": 2515
},
{
"epoch": 0.578246902248738,
"grad_norm": 2.3903862678162513,
"learning_rate": 9.016341078052908e-06,
"loss": 1.1566,
"step": 2520
},
{
"epoch": 0.5793942175309775,
"grad_norm": 2.487113987482187,
"learning_rate": 8.976492407391046e-06,
"loss": 1.1502,
"step": 2525
},
{
"epoch": 0.5805415328132171,
"grad_norm": 2.265788508857309,
"learning_rate": 8.93666015453307e-06,
"loss": 1.1359,
"step": 2530
},
{
"epoch": 0.5816888480954566,
"grad_norm": 2.361564602393008,
"learning_rate": 8.89684495841719e-06,
"loss": 1.1664,
"step": 2535
},
{
"epoch": 0.5828361633776962,
"grad_norm": 2.4240744893246156,
"learning_rate": 8.857047457708023e-06,
"loss": 1.1523,
"step": 2540
},
{
"epoch": 0.5839834786599357,
"grad_norm": 2.338487255417482,
"learning_rate": 8.817268290786343e-06,
"loss": 1.1125,
"step": 2545
},
{
"epoch": 0.5851307939421753,
"grad_norm": 2.3267291458801727,
"learning_rate": 8.777508095738818e-06,
"loss": 1.123,
"step": 2550
},
{
"epoch": 0.5862781092244149,
"grad_norm": 2.3955672934820966,
"learning_rate": 8.737767510347816e-06,
"loss": 1.1247,
"step": 2555
},
{
"epoch": 0.5874254245066545,
"grad_norm": 2.463831836478037,
"learning_rate": 8.698047172081129e-06,
"loss": 1.148,
"step": 2560
},
{
"epoch": 0.588572739788894,
"grad_norm": 2.370560723719375,
"learning_rate": 8.658347718081791e-06,
"loss": 1.1158,
"step": 2565
},
{
"epoch": 0.5897200550711336,
"grad_norm": 2.387926689961274,
"learning_rate": 8.618669785157825e-06,
"loss": 1.1733,
"step": 2570
},
{
"epoch": 0.5908673703533731,
"grad_norm": 2.383333743021682,
"learning_rate": 8.579014009772045e-06,
"loss": 1.1675,
"step": 2575
},
{
"epoch": 0.5920146856356127,
"grad_norm": 2.3291743526646633,
"learning_rate": 8.539381028031838e-06,
"loss": 1.1252,
"step": 2580
},
{
"epoch": 0.5931620009178522,
"grad_norm": 2.1671129717844213,
"learning_rate": 8.499771475678968e-06,
"loss": 1.1349,
"step": 2585
},
{
"epoch": 0.5943093162000918,
"grad_norm": 2.3005770803883885,
"learning_rate": 8.46018598807938e-06,
"loss": 1.1272,
"step": 2590
},
{
"epoch": 0.5954566314823313,
"grad_norm": 2.387101372210533,
"learning_rate": 8.420625200212985e-06,
"loss": 1.1567,
"step": 2595
},
{
"epoch": 0.596603946764571,
"grad_norm": 2.4728108936932736,
"learning_rate": 8.381089746663517e-06,
"loss": 1.1681,
"step": 2600
},
{
"epoch": 0.5977512620468105,
"grad_norm": 2.4007708346991357,
"learning_rate": 8.341580261608305e-06,
"loss": 1.1507,
"step": 2605
},
{
"epoch": 0.59889857732905,
"grad_norm": 2.4457553394851774,
"learning_rate": 8.302097378808147e-06,
"loss": 1.1337,
"step": 2610
},
{
"epoch": 0.6000458926112896,
"grad_norm": 2.315516541044818,
"learning_rate": 8.262641731597097e-06,
"loss": 1.1388,
"step": 2615
},
{
"epoch": 0.6011932078935291,
"grad_norm": 2.479154641664564,
"learning_rate": 8.223213952872353e-06,
"loss": 1.1541,
"step": 2620
},
{
"epoch": 0.6023405231757687,
"grad_norm": 2.352645397604372,
"learning_rate": 8.183814675084074e-06,
"loss": 1.1265,
"step": 2625
},
{
"epoch": 0.6034878384580082,
"grad_norm": 2.3289347257902775,
"learning_rate": 8.144444530225237e-06,
"loss": 1.1359,
"step": 2630
},
{
"epoch": 0.6046351537402478,
"grad_norm": 2.4536927501335444,
"learning_rate": 8.105104149821515e-06,
"loss": 1.1372,
"step": 2635
},
{
"epoch": 0.6057824690224873,
"grad_norm": 2.254438191832268,
"learning_rate": 8.065794164921128e-06,
"loss": 1.1621,
"step": 2640
},
{
"epoch": 0.606929784304727,
"grad_norm": 2.385368404798516,
"learning_rate": 8.026515206084744e-06,
"loss": 1.1321,
"step": 2645
},
{
"epoch": 0.6080770995869665,
"grad_norm": 2.3736422295023494,
"learning_rate": 7.987267903375331e-06,
"loss": 1.14,
"step": 2650
},
{
"epoch": 0.6092244148692061,
"grad_norm": 2.3859764598647235,
"learning_rate": 7.948052886348091e-06,
"loss": 1.1566,
"step": 2655
},
{
"epoch": 0.6103717301514456,
"grad_norm": 2.3273871754564923,
"learning_rate": 7.90887078404033e-06,
"loss": 1.1258,
"step": 2660
},
{
"epoch": 0.6115190454336852,
"grad_norm": 2.3442824667455096,
"learning_rate": 7.869722224961372e-06,
"loss": 1.1323,
"step": 2665
},
{
"epoch": 0.6126663607159247,
"grad_norm": 2.2232728108361033,
"learning_rate": 7.830607837082494e-06,
"loss": 1.1503,
"step": 2670
},
{
"epoch": 0.6138136759981643,
"grad_norm": 2.265021484521007,
"learning_rate": 7.791528247826832e-06,
"loss": 1.112,
"step": 2675
},
{
"epoch": 0.6149609912804038,
"grad_norm": 2.4203498234905694,
"learning_rate": 7.75248408405934e-06,
"loss": 1.1267,
"step": 2680
},
{
"epoch": 0.6161083065626434,
"grad_norm": 2.3786029930756527,
"learning_rate": 7.71347597207671e-06,
"loss": 1.1377,
"step": 2685
},
{
"epoch": 0.617255621844883,
"grad_norm": 2.3033389080041706,
"learning_rate": 7.674504537597336e-06,
"loss": 1.12,
"step": 2690
},
{
"epoch": 0.6184029371271226,
"grad_norm": 2.349456539458806,
"learning_rate": 7.635570405751297e-06,
"loss": 1.112,
"step": 2695
},
{
"epoch": 0.6195502524093621,
"grad_norm": 2.547343077883558,
"learning_rate": 7.596674201070282e-06,
"loss": 1.134,
"step": 2700
},
{
"epoch": 0.6206975676916017,
"grad_norm": 2.3588027590426344,
"learning_rate": 7.557816547477627e-06,
"loss": 1.1496,
"step": 2705
},
{
"epoch": 0.6218448829738412,
"grad_norm": 2.4201812828437794,
"learning_rate": 7.518998068278266e-06,
"loss": 1.1345,
"step": 2710
},
{
"epoch": 0.6229921982560808,
"grad_norm": 2.497692339906205,
"learning_rate": 7.480219386148751e-06,
"loss": 1.144,
"step": 2715
},
{
"epoch": 0.6241395135383203,
"grad_norm": 2.326917172406579,
"learning_rate": 7.441481123127257e-06,
"loss": 1.1416,
"step": 2720
},
{
"epoch": 0.6252868288205599,
"grad_norm": 2.3514950300039628,
"learning_rate": 7.402783900603612e-06,
"loss": 1.1094,
"step": 2725
},
{
"epoch": 0.6264341441027994,
"grad_norm": 2.4507528851663283,
"learning_rate": 7.364128339309326e-06,
"loss": 1.1325,
"step": 2730
},
{
"epoch": 0.627581459385039,
"grad_norm": 2.3290237424744418,
"learning_rate": 7.325515059307622e-06,
"loss": 1.1292,
"step": 2735
},
{
"epoch": 0.6287287746672786,
"grad_norm": 2.3160967005917077,
"learning_rate": 7.286944679983521e-06,
"loss": 1.1128,
"step": 2740
},
{
"epoch": 0.6298760899495182,
"grad_norm": 2.26849447367049,
"learning_rate": 7.248417820033857e-06,
"loss": 1.1198,
"step": 2745
},
{
"epoch": 0.6310234052317577,
"grad_norm": 2.3127943494175187,
"learning_rate": 7.209935097457412e-06,
"loss": 1.1356,
"step": 2750
},
{
"epoch": 0.6321707205139973,
"grad_norm": 2.2785842730855586,
"learning_rate": 7.171497129544946e-06,
"loss": 1.1264,
"step": 2755
},
{
"epoch": 0.6333180357962368,
"grad_norm": 2.2682813135513937,
"learning_rate": 7.133104532869342e-06,
"loss": 1.1096,
"step": 2760
},
{
"epoch": 0.6344653510784763,
"grad_norm": 2.331975147659969,
"learning_rate": 7.094757923275688e-06,
"loss": 1.1119,
"step": 2765
},
{
"epoch": 0.6356126663607159,
"grad_norm": 2.4071880692345116,
"learning_rate": 7.056457915871399e-06,
"loss": 1.109,
"step": 2770
},
{
"epoch": 0.6367599816429554,
"grad_norm": 2.3292160849189902,
"learning_rate": 7.018205125016369e-06,
"loss": 1.1561,
"step": 2775
},
{
"epoch": 0.637907296925195,
"grad_norm": 2.347302425036466,
"learning_rate": 6.980000164313093e-06,
"loss": 1.1349,
"step": 2780
},
{
"epoch": 0.6390546122074346,
"grad_norm": 2.4185665904201694,
"learning_rate": 6.9418436465968485e-06,
"loss": 1.1423,
"step": 2785
},
{
"epoch": 0.6402019274896742,
"grad_norm": 2.462052073994329,
"learning_rate": 6.903736183925835e-06,
"loss": 1.1228,
"step": 2790
},
{
"epoch": 0.6413492427719137,
"grad_norm": 2.464606580026005,
"learning_rate": 6.865678387571394e-06,
"loss": 1.1109,
"step": 2795
},
{
"epoch": 0.6424965580541533,
"grad_norm": 2.3960490934288226,
"learning_rate": 6.82767086800817e-06,
"loss": 1.1206,
"step": 2800
},
{
"epoch": 0.6436438733363928,
"grad_norm": 2.307847274056824,
"learning_rate": 6.789714234904332e-06,
"loss": 1.1223,
"step": 2805
},
{
"epoch": 0.6447911886186324,
"grad_norm": 2.378007781793691,
"learning_rate": 6.751809097111799e-06,
"loss": 1.1254,
"step": 2810
},
{
"epoch": 0.6459385039008719,
"grad_norm": 2.412595548847994,
"learning_rate": 6.71395606265646e-06,
"loss": 1.1342,
"step": 2815
},
{
"epoch": 0.6470858191831115,
"grad_norm": 2.376108073746517,
"learning_rate": 6.676155738728438e-06,
"loss": 1.1388,
"step": 2820
},
{
"epoch": 0.648233134465351,
"grad_norm": 2.368409721798146,
"learning_rate": 6.638408731672332e-06,
"loss": 1.1375,
"step": 2825
},
{
"epoch": 0.6493804497475907,
"grad_norm": 2.338962451200504,
"learning_rate": 6.600715646977503e-06,
"loss": 1.1278,
"step": 2830
},
{
"epoch": 0.6505277650298302,
"grad_norm": 2.4197757460285767,
"learning_rate": 6.5630770892683656e-06,
"loss": 1.1146,
"step": 2835
},
{
"epoch": 0.6516750803120698,
"grad_norm": 2.456285922205978,
"learning_rate": 6.525493662294669e-06,
"loss": 1.1124,
"step": 2840
},
{
"epoch": 0.6528223955943093,
"grad_norm": 2.377267536143265,
"learning_rate": 6.487965968921834e-06,
"loss": 1.1435,
"step": 2845
},
{
"epoch": 0.6539697108765489,
"grad_norm": 2.282462616393301,
"learning_rate": 6.450494611121274e-06,
"loss": 1.1134,
"step": 2850
},
{
"epoch": 0.6551170261587884,
"grad_norm": 2.364080280114686,
"learning_rate": 6.413080189960734e-06,
"loss": 1.1174,
"step": 2855
},
{
"epoch": 0.656264341441028,
"grad_norm": 3.2795783117457513,
"learning_rate": 6.375723305594658e-06,
"loss": 1.1226,
"step": 2860
},
{
"epoch": 0.6574116567232675,
"grad_norm": 2.381603120461069,
"learning_rate": 6.338424557254556e-06,
"loss": 1.1112,
"step": 2865
},
{
"epoch": 0.6585589720055071,
"grad_norm": 2.471915466738649,
"learning_rate": 6.301184543239398e-06,
"loss": 1.1375,
"step": 2870
},
{
"epoch": 0.6597062872877467,
"grad_norm": 2.668918836628048,
"learning_rate": 6.264003860906003e-06,
"loss": 1.0987,
"step": 2875
},
{
"epoch": 0.6608536025699863,
"grad_norm": 2.415869244791648,
"learning_rate": 6.2268831066594846e-06,
"loss": 1.0971,
"step": 2880
},
{
"epoch": 0.6620009178522258,
"grad_norm": 2.4645346416387524,
"learning_rate": 6.189822875943644e-06,
"loss": 1.1365,
"step": 2885
},
{
"epoch": 0.6631482331344654,
"grad_norm": 2.3243005208081025,
"learning_rate": 6.152823763231463e-06,
"loss": 1.1058,
"step": 2890
},
{
"epoch": 0.6642955484167049,
"grad_norm": 2.2736888091121026,
"learning_rate": 6.115886362015525e-06,
"loss": 1.1106,
"step": 2895
},
{
"epoch": 0.6654428636989445,
"grad_norm": 2.3904086910487616,
"learning_rate": 6.079011264798534e-06,
"loss": 1.0944,
"step": 2900
},
{
"epoch": 0.666590178981184,
"grad_norm": 2.427732424122635,
"learning_rate": 6.042199063083787e-06,
"loss": 1.1154,
"step": 2905
},
{
"epoch": 0.6677374942634235,
"grad_norm": 2.238637944143973,
"learning_rate": 6.005450347365687e-06,
"loss": 1.1079,
"step": 2910
},
{
"epoch": 0.6688848095456631,
"grad_norm": 2.491540791091301,
"learning_rate": 5.96876570712028e-06,
"loss": 1.1369,
"step": 2915
},
{
"epoch": 0.6700321248279028,
"grad_norm": 2.388157401107027,
"learning_rate": 5.932145730795793e-06,
"loss": 1.0972,
"step": 2920
},
{
"epoch": 0.6711794401101423,
"grad_norm": 2.281687888087642,
"learning_rate": 5.895591005803198e-06,
"loss": 1.0911,
"step": 2925
},
{
"epoch": 0.6723267553923818,
"grad_norm": 2.435185289282404,
"learning_rate": 5.859102118506787e-06,
"loss": 1.1201,
"step": 2930
},
{
"epoch": 0.6734740706746214,
"grad_norm": 2.4699883089278156,
"learning_rate": 5.822679654214771e-06,
"loss": 1.0911,
"step": 2935
},
{
"epoch": 0.6746213859568609,
"grad_norm": 2.2306849746847264,
"learning_rate": 5.786324197169887e-06,
"loss": 1.12,
"step": 2940
},
{
"epoch": 0.6757687012391005,
"grad_norm": 2.3632510883273037,
"learning_rate": 5.7500363305400185e-06,
"loss": 1.1199,
"step": 2945
},
{
"epoch": 0.67691601652134,
"grad_norm": 2.2743440540898257,
"learning_rate": 5.713816636408871e-06,
"loss": 1.1177,
"step": 2950
},
{
"epoch": 0.6780633318035796,
"grad_norm": 2.471910084539622,
"learning_rate": 5.677665695766581e-06,
"loss": 1.1034,
"step": 2955
},
{
"epoch": 0.6792106470858191,
"grad_norm": 2.2906550476388934,
"learning_rate": 5.641584088500461e-06,
"loss": 1.1142,
"step": 2960
},
{
"epoch": 0.6803579623680588,
"grad_norm": 2.348585353757232,
"learning_rate": 5.605572393385645e-06,
"loss": 1.1398,
"step": 2965
},
{
"epoch": 0.6815052776502983,
"grad_norm": 2.300443710441418,
"learning_rate": 5.569631188075842e-06,
"loss": 1.0944,
"step": 2970
},
{
"epoch": 0.6826525929325379,
"grad_norm": 2.436421664434051,
"learning_rate": 5.5337610490940375e-06,
"loss": 1.1178,
"step": 2975
},
{
"epoch": 0.6837999082147774,
"grad_norm": 2.3576525615606716,
"learning_rate": 5.497962551823266e-06,
"loss": 1.1096,
"step": 2980
},
{
"epoch": 0.684947223497017,
"grad_norm": 2.3782230649051437,
"learning_rate": 5.46223627049739e-06,
"loss": 1.1149,
"step": 2985
},
{
"epoch": 0.6860945387792565,
"grad_norm": 2.308682218164475,
"learning_rate": 5.426582778191858e-06,
"loss": 1.0993,
"step": 2990
},
{
"epoch": 0.6872418540614961,
"grad_norm": 2.357568661419494,
"learning_rate": 5.3910026468145384e-06,
"loss": 1.0961,
"step": 2995
},
{
"epoch": 0.6883891693437356,
"grad_norm": 2.444899732941997,
"learning_rate": 5.355496447096533e-06,
"loss": 1.1036,
"step": 3000
},
{
"epoch": 0.6895364846259752,
"grad_norm": 2.3977195078454443,
"learning_rate": 5.320064748583031e-06,
"loss": 1.1348,
"step": 3005
},
{
"epoch": 0.6906837999082148,
"grad_norm": 2.462475456258121,
"learning_rate": 5.284708119624173e-06,
"loss": 1.1045,
"step": 3010
},
{
"epoch": 0.6918311151904544,
"grad_norm": 2.4248136095007564,
"learning_rate": 5.249427127365918e-06,
"loss": 1.1213,
"step": 3015
},
{
"epoch": 0.6929784304726939,
"grad_norm": 2.4415766775762506,
"learning_rate": 5.2142223377409616e-06,
"loss": 1.0923,
"step": 3020
},
{
"epoch": 0.6941257457549335,
"grad_norm": 2.2409511706430645,
"learning_rate": 5.179094315459652e-06,
"loss": 1.1032,
"step": 3025
},
{
"epoch": 0.695273061037173,
"grad_norm": 2.4777655292781247,
"learning_rate": 5.144043624000944e-06,
"loss": 1.0872,
"step": 3030
},
{
"epoch": 0.6964203763194126,
"grad_norm": 2.341568313842261,
"learning_rate": 5.109070825603338e-06,
"loss": 1.1318,
"step": 3035
},
{
"epoch": 0.6975676916016521,
"grad_norm": 2.3725651477887397,
"learning_rate": 5.074176481255873e-06,
"loss": 1.0942,
"step": 3040
},
{
"epoch": 0.6987150068838917,
"grad_norm": 2.523833216091205,
"learning_rate": 5.039361150689141e-06,
"loss": 1.0842,
"step": 3045
},
{
"epoch": 0.6998623221661312,
"grad_norm": 2.2418957167374964,
"learning_rate": 5.00462539236628e-06,
"loss": 1.1216,
"step": 3050
},
{
"epoch": 0.7010096374483709,
"grad_norm": 2.8693298084928625,
"learning_rate": 4.969969763474047e-06,
"loss": 1.1108,
"step": 3055
},
{
"epoch": 0.7021569527306104,
"grad_norm": 2.2823674364430167,
"learning_rate": 4.935394819913849e-06,
"loss": 1.1102,
"step": 3060
},
{
"epoch": 0.70330426801285,
"grad_norm": 2.470986373278573,
"learning_rate": 4.900901116292854e-06,
"loss": 1.1494,
"step": 3065
},
{
"epoch": 0.7044515832950895,
"grad_norm": 2.395023784542252,
"learning_rate": 4.866489205915072e-06,
"loss": 1.082,
"step": 3070
},
{
"epoch": 0.705598898577329,
"grad_norm": 2.291309312067593,
"learning_rate": 4.8321596407725044e-06,
"loss": 1.1017,
"step": 3075
},
{
"epoch": 0.7067462138595686,
"grad_norm": 2.2843719962521223,
"learning_rate": 4.7979129715362625e-06,
"loss": 1.1089,
"step": 3080
},
{
"epoch": 0.7078935291418081,
"grad_norm": 2.2151523970617504,
"learning_rate": 4.7637497475477465e-06,
"loss": 1.0997,
"step": 3085
},
{
"epoch": 0.7090408444240477,
"grad_norm": 2.402700465949013,
"learning_rate": 4.72967051680985e-06,
"loss": 1.1124,
"step": 3090
},
{
"epoch": 0.7101881597062872,
"grad_norm": 2.4442267015771466,
"learning_rate": 4.695675825978133e-06,
"loss": 1.0944,
"step": 3095
},
{
"epoch": 0.7113354749885269,
"grad_norm": 2.388630826645324,
"learning_rate": 4.661766220352098e-06,
"loss": 1.0959,
"step": 3100
},
{
"epoch": 0.7124827902707664,
"grad_norm": 2.309795253109109,
"learning_rate": 4.627942243866387e-06,
"loss": 1.1013,
"step": 3105
},
{
"epoch": 0.713630105553006,
"grad_norm": 2.467492297537252,
"learning_rate": 4.594204439082122e-06,
"loss": 1.0966,
"step": 3110
},
{
"epoch": 0.7147774208352455,
"grad_norm": 2.4257152563230058,
"learning_rate": 4.560553347178144e-06,
"loss": 1.0993,
"step": 3115
},
{
"epoch": 0.7159247361174851,
"grad_norm": 2.481936943192826,
"learning_rate": 4.526989507942374e-06,
"loss": 1.0981,
"step": 3120
},
{
"epoch": 0.7170720513997246,
"grad_norm": 2.34807669335114,
"learning_rate": 4.493513459763126e-06,
"loss": 1.0751,
"step": 3125
},
{
"epoch": 0.7182193666819642,
"grad_norm": 2.3700833632481193,
"learning_rate": 4.460125739620479e-06,
"loss": 1.098,
"step": 3130
},
{
"epoch": 0.7193666819642037,
"grad_norm": 2.438776471298164,
"learning_rate": 4.426826883077681e-06,
"loss": 1.1084,
"step": 3135
},
{
"epoch": 0.7205139972464433,
"grad_norm": 2.272516466827752,
"learning_rate": 4.393617424272527e-06,
"loss": 1.0798,
"step": 3140
},
{
"epoch": 0.7216613125286829,
"grad_norm": 2.3356663935933293,
"learning_rate": 4.360497895908826e-06,
"loss": 1.0999,
"step": 3145
},
{
"epoch": 0.7228086278109225,
"grad_norm": 2.3180589970845604,
"learning_rate": 4.3274688292478105e-06,
"loss": 1.1116,
"step": 3150
},
{
"epoch": 0.723955943093162,
"grad_norm": 2.2901324964266174,
"learning_rate": 4.294530754099666e-06,
"loss": 1.1108,
"step": 3155
},
{
"epoch": 0.7251032583754016,
"grad_norm": 2.3563145658091993,
"learning_rate": 4.261684198815004e-06,
"loss": 1.0863,
"step": 3160
},
{
"epoch": 0.7262505736576411,
"grad_norm": 2.309763606122749,
"learning_rate": 4.228929690276381e-06,
"loss": 1.0647,
"step": 3165
},
{
"epoch": 0.7273978889398807,
"grad_norm": 2.2777260232984675,
"learning_rate": 4.196267753889864e-06,
"loss": 1.0915,
"step": 3170
},
{
"epoch": 0.7285452042221202,
"grad_norm": 2.34894914988906,
"learning_rate": 4.163698913576592e-06,
"loss": 1.0917,
"step": 3175
},
{
"epoch": 0.7296925195043598,
"grad_norm": 2.289533046571488,
"learning_rate": 4.131223691764384e-06,
"loss": 1.0791,
"step": 3180
},
{
"epoch": 0.7308398347865993,
"grad_norm": 2.351419451457308,
"learning_rate": 4.098842609379339e-06,
"loss": 1.1045,
"step": 3185
},
{
"epoch": 0.731987150068839,
"grad_norm": 2.318863793299261,
"learning_rate": 4.066556185837494e-06,
"loss": 1.1015,
"step": 3190
},
{
"epoch": 0.7331344653510785,
"grad_norm": 2.3923461411768847,
"learning_rate": 4.0343649390365e-06,
"loss": 1.0958,
"step": 3195
},
{
"epoch": 0.7342817806333181,
"grad_norm": 2.3929957610837342,
"learning_rate": 4.002269385347289e-06,
"loss": 1.1092,
"step": 3200
},
{
"epoch": 0.7354290959155576,
"grad_norm": 2.4257111429359566,
"learning_rate": 3.970270039605818e-06,
"loss": 1.0883,
"step": 3205
},
{
"epoch": 0.7365764111977972,
"grad_norm": 2.2636635937684217,
"learning_rate": 3.9383674151047936e-06,
"loss": 1.0814,
"step": 3210
},
{
"epoch": 0.7377237264800367,
"grad_norm": 2.382278040921493,
"learning_rate": 3.906562023585442e-06,
"loss": 1.0917,
"step": 3215
},
{
"epoch": 0.7388710417622762,
"grad_norm": 2.5011209640625407,
"learning_rate": 3.8748543752293e-06,
"loss": 1.0927,
"step": 3220
},
{
"epoch": 0.7400183570445158,
"grad_norm": 2.4421075205952527,
"learning_rate": 3.843244978650045e-06,
"loss": 1.1204,
"step": 3225
},
{
"epoch": 0.7411656723267553,
"grad_norm": 2.298007752819784,
"learning_rate": 3.8117343408853124e-06,
"loss": 1.0542,
"step": 3230
},
{
"epoch": 0.742312987608995,
"grad_norm": 2.3328586721249733,
"learning_rate": 3.780322967388577e-06,
"loss": 1.0953,
"step": 3235
},
{
"epoch": 0.7434603028912345,
"grad_norm": 2.2089269310188726,
"learning_rate": 3.7490113620210487e-06,
"loss": 1.1072,
"step": 3240
},
{
"epoch": 0.7446076181734741,
"grad_norm": 2.4006844079537206,
"learning_rate": 3.7178000270435765e-06,
"loss": 1.1153,
"step": 3245
},
{
"epoch": 0.7457549334557136,
"grad_norm": 2.3450482883357897,
"learning_rate": 3.686689463108608e-06,
"loss": 1.0809,
"step": 3250
},
{
"epoch": 0.7469022487379532,
"grad_norm": 2.3120419044499614,
"learning_rate": 3.6556801692521426e-06,
"loss": 1.1197,
"step": 3255
},
{
"epoch": 0.7480495640201927,
"grad_norm": 2.24362750024479,
"learning_rate": 3.6247726428857344e-06,
"loss": 1.0798,
"step": 3260
},
{
"epoch": 0.7491968793024323,
"grad_norm": 2.2810362806921476,
"learning_rate": 3.593967379788522e-06,
"loss": 1.1028,
"step": 3265
},
{
"epoch": 0.7503441945846718,
"grad_norm": 2.2340180118435122,
"learning_rate": 3.563264874099258e-06,
"loss": 1.1028,
"step": 3270
},
{
"epoch": 0.7514915098669114,
"grad_norm": 2.3620284704853787,
"learning_rate": 3.532665618308395e-06,
"loss": 1.0889,
"step": 3275
},
{
"epoch": 0.752638825149151,
"grad_norm": 2.4078730171290834,
"learning_rate": 3.5021701032501777e-06,
"loss": 1.0841,
"step": 3280
},
{
"epoch": 0.7537861404313906,
"grad_norm": 2.4065328343083214,
"learning_rate": 3.4717788180947855e-06,
"loss": 1.0774,
"step": 3285
},
{
"epoch": 0.7549334557136301,
"grad_norm": 2.2828217399348074,
"learning_rate": 3.441492250340461e-06,
"loss": 1.0648,
"step": 3290
},
{
"epoch": 0.7560807709958697,
"grad_norm": 2.390793459374678,
"learning_rate": 3.4113108858057175e-06,
"loss": 1.0954,
"step": 3295
},
{
"epoch": 0.7572280862781092,
"grad_norm": 2.322350920799196,
"learning_rate": 3.3812352086215216e-06,
"loss": 1.079,
"step": 3300
},
{
"epoch": 0.7583754015603488,
"grad_norm": 2.357540821713843,
"learning_rate": 3.3512657012235396e-06,
"loss": 1.1212,
"step": 3305
},
{
"epoch": 0.7595227168425883,
"grad_norm": 2.278921399044481,
"learning_rate": 3.3214028443444034e-06,
"loss": 1.0907,
"step": 3310
},
{
"epoch": 0.7606700321248279,
"grad_norm": 2.43748599532908,
"learning_rate": 3.2916471170059895e-06,
"loss": 1.1205,
"step": 3315
},
{
"epoch": 0.7618173474070674,
"grad_norm": 2.4397124395924314,
"learning_rate": 3.261998996511736e-06,
"loss": 1.1171,
"step": 3320
},
{
"epoch": 0.7629646626893071,
"grad_norm": 2.334365795517474,
"learning_rate": 3.232458958438992e-06,
"loss": 1.0866,
"step": 3325
},
{
"epoch": 0.7641119779715466,
"grad_norm": 2.285744606391934,
"learning_rate": 3.203027476631386e-06,
"loss": 1.075,
"step": 3330
},
{
"epoch": 0.7652592932537862,
"grad_norm": 2.221051450915321,
"learning_rate": 3.1737050231912324e-06,
"loss": 1.0806,
"step": 3335
},
{
"epoch": 0.7664066085360257,
"grad_norm": 2.4305076526822655,
"learning_rate": 3.1444920684719394e-06,
"loss": 1.0797,
"step": 3340
},
{
"epoch": 0.7675539238182653,
"grad_norm": 2.294796028036383,
"learning_rate": 3.115389081070481e-06,
"loss": 1.0972,
"step": 3345
},
{
"epoch": 0.7687012391005048,
"grad_norm": 2.4013023682687593,
"learning_rate": 3.086396527819876e-06,
"loss": 1.0907,
"step": 3350
},
{
"epoch": 0.7698485543827444,
"grad_norm": 2.380235464496965,
"learning_rate": 3.057514873781703e-06,
"loss": 1.0727,
"step": 3355
},
{
"epoch": 0.7709958696649839,
"grad_norm": 2.3192527318279113,
"learning_rate": 3.028744582238633e-06,
"loss": 1.0798,
"step": 3360
},
{
"epoch": 0.7721431849472234,
"grad_norm": 2.4421045687535194,
"learning_rate": 3.0000861146869963e-06,
"loss": 1.1253,
"step": 3365
},
{
"epoch": 0.7732905002294631,
"grad_norm": 2.299556073481683,
"learning_rate": 2.9715399308294003e-06,
"loss": 1.0641,
"step": 3370
},
{
"epoch": 0.7744378155117027,
"grad_norm": 2.39327873552009,
"learning_rate": 2.9431064885673245e-06,
"loss": 1.0821,
"step": 3375
},
{
"epoch": 0.7755851307939422,
"grad_norm": 2.5227837528194694,
"learning_rate": 2.914786243993808e-06,
"loss": 1.0753,
"step": 3380
},
{
"epoch": 0.7767324460761817,
"grad_norm": 2.313573533828395,
"learning_rate": 2.8865796513860933e-06,
"loss": 1.1004,
"step": 3385
},
{
"epoch": 0.7778797613584213,
"grad_norm": 2.3172251611325922,
"learning_rate": 2.858487163198389e-06,
"loss": 1.069,
"step": 3390
},
{
"epoch": 0.7790270766406608,
"grad_norm": 2.4697157072807534,
"learning_rate": 2.8305092300545668e-06,
"loss": 1.0763,
"step": 3395
},
{
"epoch": 0.7801743919229004,
"grad_norm": 2.2944498736872,
"learning_rate": 2.8026463007409665e-06,
"loss": 1.1025,
"step": 3400
},
{
"epoch": 0.7813217072051399,
"grad_norm": 2.40801000676571,
"learning_rate": 2.7748988221991722e-06,
"loss": 1.0889,
"step": 3405
},
{
"epoch": 0.7824690224873795,
"grad_norm": 2.3225590835236005,
"learning_rate": 2.747267239518857e-06,
"loss": 1.1041,
"step": 3410
},
{
"epoch": 0.7836163377696191,
"grad_norm": 2.3739143205259725,
"learning_rate": 2.719751995930645e-06,
"loss": 1.0998,
"step": 3415
},
{
"epoch": 0.7847636530518587,
"grad_norm": 2.1994328046650975,
"learning_rate": 2.6923535327989925e-06,
"loss": 1.0643,
"step": 3420
},
{
"epoch": 0.7859109683340982,
"grad_norm": 2.4392403454609513,
"learning_rate": 2.6650722896151126e-06,
"loss": 1.0657,
"step": 3425
},
{
"epoch": 0.7870582836163378,
"grad_norm": 2.243421023055753,
"learning_rate": 2.637908703989924e-06,
"loss": 1.0965,
"step": 3430
},
{
"epoch": 0.7882055988985773,
"grad_norm": 2.5872874083756052,
"learning_rate": 2.610863211647038e-06,
"loss": 1.068,
"step": 3435
},
{
"epoch": 0.7893529141808169,
"grad_norm": 2.3302790860000777,
"learning_rate": 2.5839362464157635e-06,
"loss": 1.0682,
"step": 3440
},
{
"epoch": 0.7905002294630564,
"grad_norm": 2.314204825191404,
"learning_rate": 2.5571282402241435e-06,
"loss": 1.0691,
"step": 3445
},
{
"epoch": 0.791647544745296,
"grad_norm": 5.3426573880056845,
"learning_rate": 2.5304396230920346e-06,
"loss": 1.0978,
"step": 3450
},
{
"epoch": 0.7927948600275355,
"grad_norm": 2.583278253386985,
"learning_rate": 2.5038708231242047e-06,
"loss": 1.0788,
"step": 3455
},
{
"epoch": 0.7939421753097752,
"grad_norm": 2.374823385658736,
"learning_rate": 2.477422266503473e-06,
"loss": 1.0795,
"step": 3460
},
{
"epoch": 0.7950894905920147,
"grad_norm": 2.416491427084334,
"learning_rate": 2.4510943774838624e-06,
"loss": 1.0881,
"step": 3465
},
{
"epoch": 0.7962368058742543,
"grad_norm": 2.340810846568605,
"learning_rate": 2.424887578383799e-06,
"loss": 1.1142,
"step": 3470
},
{
"epoch": 0.7973841211564938,
"grad_norm": 2.310980554847478,
"learning_rate": 2.398802289579347e-06,
"loss": 1.0724,
"step": 3475
},
{
"epoch": 0.7985314364387334,
"grad_norm": 2.4718474311759424,
"learning_rate": 2.3728389294974472e-06,
"loss": 1.0926,
"step": 3480
},
{
"epoch": 0.7996787517209729,
"grad_norm": 2.381651829008113,
"learning_rate": 2.346997914609226e-06,
"loss": 1.119,
"step": 3485
},
{
"epoch": 0.8008260670032125,
"grad_norm": 2.3463971161415587,
"learning_rate": 2.3212796594232947e-06,
"loss": 1.0799,
"step": 3490
},
{
"epoch": 0.801973382285452,
"grad_norm": 2.3280363845921923,
"learning_rate": 2.2956845764791126e-06,
"loss": 1.0642,
"step": 3495
},
{
"epoch": 0.8031206975676916,
"grad_norm": 2.4526295488857124,
"learning_rate": 2.2702130763403674e-06,
"loss": 1.0997,
"step": 3500
},
{
"epoch": 0.8042680128499312,
"grad_norm": 2.354907631567458,
"learning_rate": 2.2448655675883936e-06,
"loss": 1.0713,
"step": 3505
},
{
"epoch": 0.8054153281321708,
"grad_norm": 2.275567450615706,
"learning_rate": 2.2196424568156073e-06,
"loss": 1.0788,
"step": 3510
},
{
"epoch": 0.8065626434144103,
"grad_norm": 2.592545388172047,
"learning_rate": 2.1945441486189913e-06,
"loss": 1.1048,
"step": 3515
},
{
"epoch": 0.8077099586966499,
"grad_norm": 2.3536440458369747,
"learning_rate": 2.1695710455936115e-06,
"loss": 1.0987,
"step": 3520
},
{
"epoch": 0.8088572739788894,
"grad_norm": 2.3512904218054813,
"learning_rate": 2.144723548326142e-06,
"loss": 1.074,
"step": 3525
},
{
"epoch": 0.810004589261129,
"grad_norm": 2.395880729511023,
"learning_rate": 2.1200020553884603e-06,
"loss": 1.0636,
"step": 3530
},
{
"epoch": 0.8111519045433685,
"grad_norm": 2.2521164104118996,
"learning_rate": 2.095406963331236e-06,
"loss": 1.0795,
"step": 3535
},
{
"epoch": 0.812299219825608,
"grad_norm": 2.337428316331845,
"learning_rate": 2.0709386666775732e-06,
"loss": 1.0928,
"step": 3540
},
{
"epoch": 0.8134465351078476,
"grad_norm": 2.2804754836362666,
"learning_rate": 2.0465975579166984e-06,
"loss": 1.077,
"step": 3545
},
{
"epoch": 0.8145938503900872,
"grad_norm": 2.3835852120695584,
"learning_rate": 2.0223840274976413e-06,
"loss": 1.0681,
"step": 3550
},
{
"epoch": 0.8157411656723268,
"grad_norm": 2.3451403504700408,
"learning_rate": 1.998298463822986e-06,
"loss": 1.0379,
"step": 3555
},
{
"epoch": 0.8168884809545663,
"grad_norm": 2.391585746462503,
"learning_rate": 1.9743412532426355e-06,
"loss": 1.0713,
"step": 3560
},
{
"epoch": 0.8180357962368059,
"grad_norm": 2.565408804237695,
"learning_rate": 1.950512780047622e-06,
"loss": 1.0727,
"step": 3565
},
{
"epoch": 0.8191831115190454,
"grad_norm": 2.2127377834416313,
"learning_rate": 1.9268134264639273e-06,
|
"loss": 1.0681, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.820330426801285, |
|
"grad_norm": 2.4867018259226623, |
|
"learning_rate": 1.9032435726463716e-06, |
|
"loss": 1.0865, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.8214777420835245, |
|
"grad_norm": 2.589421332937048, |
|
"learning_rate": 1.879803596672497e-06, |
|
"loss": 1.0573, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.8226250573657641, |
|
"grad_norm": 2.7149409401173474, |
|
"learning_rate": 1.8564938745365102e-06, |
|
"loss": 1.0896, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.8237723726480036, |
|
"grad_norm": 2.433136107961442, |
|
"learning_rate": 1.8333147801432616e-06, |
|
"loss": 1.0915, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.8249196879302433, |
|
"grad_norm": 2.5073579970693887, |
|
"learning_rate": 1.8102666853022277e-06, |
|
"loss": 1.0904, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.8260670032124828, |
|
"grad_norm": 2.3242486986861093, |
|
"learning_rate": 1.7873499597215604e-06, |
|
"loss": 1.0629, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.8272143184947224, |
|
"grad_norm": 2.3675402875292035, |
|
"learning_rate": 1.7645649710021528e-06, |
|
"loss": 1.0695, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.8283616337769619, |
|
"grad_norm": 2.469832693689684, |
|
"learning_rate": 1.7419120846317462e-06, |
|
"loss": 1.067, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.8295089490592015, |
|
"grad_norm": 2.2836763919133776, |
|
"learning_rate": 1.7193916639790665e-06, |
|
"loss": 1.0598, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.830656264341441, |
|
"grad_norm": 2.3357605323364665, |
|
"learning_rate": 1.697004070287982e-06, |
|
"loss": 1.0814, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.8318035796236806, |
|
"grad_norm": 2.419230420031552, |
|
"learning_rate": 1.6747496626717318e-06, |
|
"loss": 1.0622, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.8329508949059201, |
|
"grad_norm": 2.2377807288438363, |
|
"learning_rate": 1.6526287981071477e-06, |
|
"loss": 1.0619, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.8340982101881597, |
|
"grad_norm": 2.313360528415539, |
|
"learning_rate": 1.6306418314289408e-06, |
|
"loss": 1.0754, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.8352455254703993, |
|
"grad_norm": 2.388054570280702, |
|
"learning_rate": 1.6087891153239932e-06, |
|
"loss": 1.0828, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.8363928407526389, |
|
"grad_norm": 2.3362823868458267, |
|
"learning_rate": 1.5870710003257162e-06, |
|
"loss": 1.049, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.8375401560348784, |
|
"grad_norm": 2.3694652165740466, |
|
"learning_rate": 1.5654878348084246e-06, |
|
"loss": 1.0882, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.838687471317118, |
|
"grad_norm": 2.4438443231900355, |
|
"learning_rate": 1.5440399649817384e-06, |
|
"loss": 1.0493, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.8398347865993575, |
|
"grad_norm": 2.479186138101549, |
|
"learning_rate": 1.5227277348850466e-06, |
|
"loss": 1.0997, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.840982101881597, |
|
"grad_norm": 2.298179774902708, |
|
"learning_rate": 1.5015514863819625e-06, |
|
"loss": 1.0802, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.8421294171638366, |
|
"grad_norm": 2.384294974826556, |
|
"learning_rate": 1.4805115591548746e-06, |
|
"loss": 1.1026, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.8432767324460761, |
|
"grad_norm": 2.2772068888876635, |
|
"learning_rate": 1.4596082906994658e-06, |
|
"loss": 1.0767, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.8444240477283157, |
|
"grad_norm": 2.539754475914315, |
|
"learning_rate": 1.4388420163193217e-06, |
|
"loss": 1.0786, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.8455713630105554, |
|
"grad_norm": 2.2681491099219464, |
|
"learning_rate": 1.4182130691205399e-06, |
|
"loss": 1.0681, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.8467186782927949, |
|
"grad_norm": 2.328527473494352, |
|
"learning_rate": 1.3977217800063847e-06, |
|
"loss": 1.0636, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.8478659935750344, |
|
"grad_norm": 2.3756645866176567, |
|
"learning_rate": 1.3773684776719987e-06, |
|
"loss": 1.0834, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.849013308857274, |
|
"grad_norm": 2.3382653231993684, |
|
"learning_rate": 1.3571534885991044e-06, |
|
"loss": 1.0656, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.8501606241395135, |
|
"grad_norm": 2.3242086558835173, |
|
"learning_rate": 1.337077137050784e-06, |
|
"loss": 1.0451, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.8513079394217531, |
|
"grad_norm": 2.269434671317697, |
|
"learning_rate": 1.3171397450662716e-06, |
|
"loss": 1.0571, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.8524552547039926, |
|
"grad_norm": 2.2469307076032483, |
|
"learning_rate": 1.297341632455793e-06, |
|
"loss": 1.0557, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.8536025699862322, |
|
"grad_norm": 2.420410716316704, |
|
"learning_rate": 1.2776831167954252e-06, |
|
"loss": 1.0919, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.8547498852684717, |
|
"grad_norm": 2.406358800823869, |
|
"learning_rate": 1.258164513422019e-06, |
|
"loss": 1.0696, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.8558972005507114, |
|
"grad_norm": 2.312198841285599, |
|
"learning_rate": 1.2387861354281194e-06, |
|
"loss": 1.0645, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.8570445158329509, |
|
"grad_norm": 2.337798774099059, |
|
"learning_rate": 1.2195482936569603e-06, |
|
"loss": 1.0698, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.8581918311151905, |
|
"grad_norm": 2.3239212258602424, |
|
"learning_rate": 1.2004512966974746e-06, |
|
"loss": 1.0672, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.85933914639743, |
|
"grad_norm": 2.317821805872963, |
|
"learning_rate": 1.1814954508793397e-06, |
|
"loss": 1.0875, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.8604864616796696, |
|
"grad_norm": 2.34518455218088, |
|
"learning_rate": 1.162681060268065e-06, |
|
"loss": 1.0615, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.8616337769619091, |
|
"grad_norm": 2.3102053326026226, |
|
"learning_rate": 1.1440084266601148e-06, |
|
"loss": 1.0692, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.8627810922441487, |
|
"grad_norm": 2.2988208279144744, |
|
"learning_rate": 1.1254778495780749e-06, |
|
"loss": 1.0365, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.8639284075263882, |
|
"grad_norm": 2.3343105163956603, |
|
"learning_rate": 1.1070896262658381e-06, |
|
"loss": 1.0526, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.8650757228086278, |
|
"grad_norm": 2.484363397400702, |
|
"learning_rate": 1.0888440516838373e-06, |
|
"loss": 1.0601, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.8662230380908674, |
|
"grad_norm": 2.3781607794246287, |
|
"learning_rate": 1.0707414185043163e-06, |
|
"loss": 1.0706, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.867370353373107, |
|
"grad_norm": 2.3738365009035376, |
|
"learning_rate": 1.0527820171066372e-06, |
|
"loss": 1.0825, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.8685176686553465, |
|
"grad_norm": 2.407144887683863, |
|
"learning_rate": 1.0349661355726215e-06, |
|
"loss": 1.0824, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.8696649839375861, |
|
"grad_norm": 2.3740565527233812, |
|
"learning_rate": 1.0172940596819258e-06, |
|
"loss": 1.0609, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.8708122992198256, |
|
"grad_norm": 2.4197584148785265, |
|
"learning_rate": 9.997660729074587e-07, |
|
"loss": 1.0759, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.8719596145020652, |
|
"grad_norm": 2.4001709433977165, |
|
"learning_rate": 9.823824564108408e-07, |
|
"loss": 1.0529, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.8731069297843047, |
|
"grad_norm": 2.3761802261416745, |
|
"learning_rate": 9.651434890378797e-07, |
|
"loss": 1.0864, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.8742542450665443, |
|
"grad_norm": 2.390851465404339, |
|
"learning_rate": 9.480494473141189e-07, |
|
"loss": 1.0641, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.8754015603487838, |
|
"grad_norm": 2.2302978594213583, |
|
"learning_rate": 9.311006054403726e-07, |
|
"loss": 1.0548, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.8765488756310233, |
|
"grad_norm": 2.375126839515668, |
|
"learning_rate": 9.142972352883595e-07, |
|
"loss": 1.0781, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.877696190913263, |
|
"grad_norm": 2.3054748603808237, |
|
"learning_rate": 8.976396063963156e-07, |
|
"loss": 1.0679, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8788435061955026, |
|
"grad_norm": 2.6101738491562783, |
|
"learning_rate": 8.811279859646915e-07, |
|
"loss": 1.038, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8799908214777421, |
|
"grad_norm": 2.2926250243874575, |
|
"learning_rate": 8.647626388518471e-07, |
|
"loss": 1.0416, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8811381367599816, |
|
"grad_norm": 2.3466635198769112, |
|
"learning_rate": 8.485438275698154e-07, |
|
"loss": 1.0915, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8822854520422212, |
|
"grad_norm": 2.346242158668616, |
|
"learning_rate": 8.324718122800912e-07, |
|
"loss": 1.0658, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8834327673244607, |
|
"grad_norm": 2.2828753234624672, |
|
"learning_rate": 8.165468507894514e-07, |
|
"loss": 1.0691, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8845800826067003, |
|
"grad_norm": 2.2381710090655362, |
|
"learning_rate": 8.007691985458277e-07, |
|
"loss": 1.0695, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8857273978889398, |
|
"grad_norm": 2.3043691660140637, |
|
"learning_rate": 7.851391086341953e-07, |
|
"loss": 1.0627, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8868747131711794, |
|
"grad_norm": 2.3258581770015176, |
|
"learning_rate": 7.696568317725339e-07, |
|
"loss": 1.0805, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.888022028453419, |
|
"grad_norm": 2.3844984378244263, |
|
"learning_rate": 7.543226163077899e-07, |
|
"loss": 1.0669, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.8891693437356586, |
|
"grad_norm": 2.4109038157393012, |
|
"learning_rate": 7.391367082118961e-07, |
|
"loss": 1.0861, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.8903166590178981, |
|
"grad_norm": 2.355682219549452, |
|
"learning_rate": 7.240993510778304e-07, |
|
"loss": 1.0607, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.8914639743001377, |
|
"grad_norm": 2.3034428538054232, |
|
"learning_rate": 7.092107861157004e-07, |
|
"loss": 1.0718, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.8926112895823772, |
|
"grad_norm": 2.3594432455984116, |
|
"learning_rate": 6.944712521488884e-07, |
|
"loss": 1.058, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.8937586048646168, |
|
"grad_norm": 2.384920183351963, |
|
"learning_rate": 6.798809856102028e-07, |
|
"loss": 1.0624, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.8949059201468563, |
|
"grad_norm": 2.315792099017849, |
|
"learning_rate": 6.654402205380961e-07, |
|
"loss": 1.0824, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.8960532354290959, |
|
"grad_norm": 2.354796149058742, |
|
"learning_rate": 6.511491885729149e-07, |
|
"loss": 1.0613, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.8972005507113354, |
|
"grad_norm": 2.397012765505873, |
|
"learning_rate": 6.370081189531707e-07, |
|
"loss": 1.0625, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.8983478659935751, |
|
"grad_norm": 2.400999165531199, |
|
"learning_rate": 6.230172385118738e-07, |
|
"loss": 1.0912, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.8994951812758146, |
|
"grad_norm": 2.2856810018900036, |
|
"learning_rate": 6.091767716728924e-07, |
|
"loss": 1.0627, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.9006424965580542, |
|
"grad_norm": 2.4240133866264273, |
|
"learning_rate": 5.954869404473473e-07, |
|
"loss": 1.1025, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.9017898118402937, |
|
"grad_norm": 2.3880077625070313, |
|
"learning_rate": 5.819479644300563e-07, |
|
"loss": 1.0495, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.9029371271225333, |
|
"grad_norm": 2.316683568122776, |
|
"learning_rate": 5.685600607960129e-07, |
|
"loss": 1.0508, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.9040844424047728, |
|
"grad_norm": 2.2964133731561525, |
|
"learning_rate": 5.553234442969014e-07, |
|
"loss": 1.0649, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.9052317576870124, |
|
"grad_norm": 2.385202578198628, |
|
"learning_rate": 5.422383272576426e-07, |
|
"loss": 1.0615, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.9063790729692519, |
|
"grad_norm": 2.3105288018374512, |
|
"learning_rate": 5.293049195730038e-07, |
|
"loss": 1.0376, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.9075263882514915, |
|
"grad_norm": 2.3047993770301893, |
|
"learning_rate": 5.165234287042198e-07, |
|
"loss": 1.0253, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.9086737035337311, |
|
"grad_norm": 2.350917765719763, |
|
"learning_rate": 5.038940596756747e-07, |
|
"loss": 1.0706, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.9098210188159707, |
|
"grad_norm": 2.4392692899669117, |
|
"learning_rate": 4.914170150716024e-07, |
|
"loss": 1.0762, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.9109683340982102, |
|
"grad_norm": 2.3371470208418916, |
|
"learning_rate": 4.790924950328435e-07, |
|
"loss": 1.0453, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.9121156493804498, |
|
"grad_norm": 2.204682457809935, |
|
"learning_rate": 4.6692069725363887e-07, |
|
"loss": 1.0761, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.9132629646626893, |
|
"grad_norm": 2.290854793691128, |
|
"learning_rate": 4.5490181697844916e-07, |
|
"loss": 1.052, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.9144102799449288, |
|
"grad_norm": 2.2988253093129174, |
|
"learning_rate": 4.4303604699882594e-07, |
|
"loss": 1.0451, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.9155575952271684, |
|
"grad_norm": 2.26124979946759, |
|
"learning_rate": 4.313235776503244e-07, |
|
"loss": 1.0555, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.9167049105094079, |
|
"grad_norm": 2.274647741800421, |
|
"learning_rate": 4.197645968094466e-07, |
|
"loss": 1.0421, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.9178522257916475, |
|
"grad_norm": 2.3957337901708953, |
|
"learning_rate": 4.08359289890623e-07, |
|
"loss": 1.0559, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.9189995410738871, |
|
"grad_norm": 2.3442250173869827, |
|
"learning_rate": 3.971078398432482e-07, |
|
"loss": 1.0783, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.9201468563561267, |
|
"grad_norm": 2.3642424317720954, |
|
"learning_rate": 3.860104271487397e-07, |
|
"loss": 1.0403, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.9212941716383662, |
|
"grad_norm": 2.2488142508080893, |
|
"learning_rate": 3.750672298176405e-07, |
|
"loss": 1.0504, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.9224414869206058, |
|
"grad_norm": 2.3517485020535576, |
|
"learning_rate": 3.6427842338677353e-07, |
|
"loss": 1.0618, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.9235888022028453, |
|
"grad_norm": 2.3231400273137033, |
|
"learning_rate": 3.5364418091641374e-07, |
|
"loss": 1.0808, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.9247361174850849, |
|
"grad_norm": 2.3960144408780715, |
|
"learning_rate": 3.4316467298752264e-07, |
|
"loss": 1.0768, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.9258834327673244, |
|
"grad_norm": 2.481754354110254, |
|
"learning_rate": 3.328400676990029e-07, |
|
"loss": 1.0819, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.927030748049564, |
|
"grad_norm": 2.261698190389203, |
|
"learning_rate": 3.226705306650113e-07, |
|
"loss": 1.0653, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.9281780633318035, |
|
"grad_norm": 2.347018729974591, |
|
"learning_rate": 3.1265622501229554e-07, |
|
"loss": 1.0485, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.9293253786140432, |
|
"grad_norm": 2.3350732569344985, |
|
"learning_rate": 3.027973113775795e-07, |
|
"loss": 1.0678, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.9304726938962827, |
|
"grad_norm": 2.1947433426596867, |
|
"learning_rate": 2.9309394790498547e-07, |
|
"loss": 1.057, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.9316200091785223, |
|
"grad_norm": 2.3905173803456816, |
|
"learning_rate": 2.835462902434971e-07, |
|
"loss": 1.0724, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.9327673244607618, |
|
"grad_norm": 2.4899822063351524, |
|
"learning_rate": 2.741544915444694e-07, |
|
"loss": 1.0759, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.9339146397430014, |
|
"grad_norm": 2.3105967151836206, |
|
"learning_rate": 2.649187024591604e-07, |
|
"loss": 1.0325, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.9350619550252409, |
|
"grad_norm": 2.3253855249232105, |
|
"learning_rate": 2.5583907113632456e-07, |
|
"loss": 1.0736, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.9362092703074805, |
|
"grad_norm": 2.5115169921624334, |
|
"learning_rate": 2.4691574321983216e-07, |
|
"loss": 1.0662, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.93735658558972, |
|
"grad_norm": 2.240118179487899, |
|
"learning_rate": 2.3814886184633012e-07, |
|
"loss": 1.0598, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.9385039008719596, |
|
"grad_norm": 2.4398202590323885, |
|
"learning_rate": 2.2953856764295623e-07, |
|
"loss": 1.0763, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.9396512161541992, |
|
"grad_norm": 2.384051442170586, |
|
"learning_rate": 2.210849987250685e-07, |
|
"loss": 1.0578, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.9407985314364388, |
|
"grad_norm": 2.521911651181548, |
|
"learning_rate": 2.1278829069404483e-07, |
|
"loss": 1.0507, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.9419458467186783, |
|
"grad_norm": 2.312630644718287, |
|
"learning_rate": 2.0464857663509473e-07, |
|
"loss": 1.0632, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.9430931620009179, |
|
"grad_norm": 2.28669658207446, |
|
"learning_rate": 1.9666598711513663e-07, |
|
"loss": 1.0546, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.9442404772831574, |
|
"grad_norm": 2.4280041176246336, |
|
"learning_rate": 1.8884065018069165e-07, |
|
"loss": 1.0682, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.945387792565397, |
|
"grad_norm": 2.3965039181313172, |
|
"learning_rate": 1.811726913558387e-07, |
|
"loss": 1.053, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.9465351078476365, |
|
"grad_norm": 2.317349780596125, |
|
"learning_rate": 1.736622336401983e-07, |
|
"loss": 1.0586, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.947682423129876, |
|
"grad_norm": 2.4267122952778695, |
|
"learning_rate": 1.663093975069552e-07, |
|
"loss": 1.0677, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.9488297384121156, |
|
"grad_norm": 2.267834335988602, |
|
"learning_rate": 1.5911430090093437e-07, |
|
"loss": 1.0375, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.9499770536943553, |
|
"grad_norm": 2.2507286853587347, |
|
"learning_rate": 1.5207705923670158e-07, |
|
"loss": 1.0486, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.9511243689765948, |
|
"grad_norm": 2.351735220333465, |
|
"learning_rate": 1.451977853967146e-07, |
|
"loss": 1.0634, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.9522716842588343, |
|
"grad_norm": 2.354995051605583, |
|
"learning_rate": 1.3847658972951482e-07, |
|
"loss": 1.059, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.9534189995410739, |
|
"grad_norm": 2.4112199312162312, |
|
"learning_rate": 1.319135800479543e-07, |
|
"loss": 1.0663, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.9545663148233134, |
|
"grad_norm": 2.224163787960051, |
|
"learning_rate": 1.2550886162746468e-07, |
|
"loss": 1.0321, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.955713630105553, |
|
"grad_norm": 2.3832043966523475, |
|
"learning_rate": 1.192625372043754e-07, |
|
"loss": 1.0558, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.9568609453877925, |
|
"grad_norm": 2.573113863913223, |
|
"learning_rate": 1.1317470697425837e-07, |
|
"loss": 1.0453, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.9580082606700321, |
|
"grad_norm": 2.36325059791216, |
|
"learning_rate": 1.072454685903257e-07, |
|
"loss": 1.0619, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.9591555759522716, |
|
"grad_norm": 2.3121307241018036, |
|
"learning_rate": 1.0147491716185675e-07, |
|
"loss": 1.0611, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.9603028912345113, |
|
"grad_norm": 2.400560326114734, |
|
"learning_rate": 9.586314525268369e-08, |
|
"loss": 1.0828, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.9614502065167508, |
|
"grad_norm": 2.422338514530241, |
|
"learning_rate": 9.041024287969491e-08, |
|
"loss": 1.062, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.9625975217989904, |
|
"grad_norm": 2.3711025333563263, |
|
"learning_rate": 8.511629751139949e-08, |
|
"loss": 1.046, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.9637448370812299, |
|
"grad_norm": 2.3607300247468586, |
|
"learning_rate": 7.99813940665195e-08, |
|
"loss": 1.0718, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.9648921523634695, |
|
"grad_norm": 2.2844036833072745, |
|
"learning_rate": 7.50056149126277e-08, |
|
"loss": 1.0338, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.966039467645709, |
|
"grad_norm": 2.412561145835342, |
|
"learning_rate": 7.018903986483083e-08, |
|
"loss": 1.0733, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.9671867829279486, |
|
"grad_norm": 2.3903032571937444, |
|
"learning_rate": 6.553174618448399e-08, |
|
"loss": 1.0629, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.9683340982101881, |
|
"grad_norm": 2.3221221391589038, |
|
"learning_rate": 6.103380857795604e-08, |
|
"loss": 1.0822, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.9694814134924277, |
|
"grad_norm": 2.285250340291083, |
|
"learning_rate": 5.6695299195425045e-08, |
|
"loss": 1.0854, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.9706287287746673, |
|
"grad_norm": 2.2592180518969434, |
|
"learning_rate": 5.251628762972916e-08, |
|
"loss": 1.0692, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.9717760440569069, |
|
"grad_norm": 2.31137882081886, |
|
"learning_rate": 4.84968409152442e-08, |
|
"loss": 1.076, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.9729233593391464, |
|
"grad_norm": 2.3345498965147993, |
|
"learning_rate": 4.4637023526807875e-08, |
|
"loss": 1.0913, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.974070674621386, |
|
"grad_norm": 2.306918943368033, |
|
"learning_rate": 4.0936897378691664e-08, |
|
"loss": 1.0631, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.9752179899036255, |
|
"grad_norm": 2.3923389986430137, |
|
"learning_rate": 3.739652182360054e-08, |
|
"loss": 1.0489, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.9763653051858651, |
|
"grad_norm": 2.3531625372494007, |
|
"learning_rate": 3.401595365172483e-08, |
|
"loss": 1.0733, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.9775126204681046, |
|
"grad_norm": 2.436621801398954, |
|
"learning_rate": 3.079524708983095e-08, |
|
"loss": 1.0576, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.9786599357503442, |
|
"grad_norm": 2.3786365675633108, |
|
"learning_rate": 2.773445380038653e-08, |
|
"loss": 1.0554, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.9798072510325837, |
|
"grad_norm": 2.3509821615607454, |
|
"learning_rate": 2.483362288073443e-08, |
|
"loss": 1.0824, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.9809545663148234, |
|
"grad_norm": 2.3808539729525844, |
|
"learning_rate": 2.2092800862305587e-08, |
|
"loss": 1.0991, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.9821018815970629, |
|
"grad_norm": 2.249305593751699, |
|
"learning_rate": 1.9512031709874037e-08, |
|
"loss": 1.0636, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.9832491968793025, |
|
"grad_norm": 2.3078218741343544, |
|
"learning_rate": 1.7091356820848616e-08, |
|
"loss": 1.0602, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.984396512161542, |
|
"grad_norm": 2.529969042443467, |
|
"learning_rate": 1.4830815024606815e-08, |
|
"loss": 1.0555, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.9855438274437816, |
|
"grad_norm": 2.3368971664389084, |
|
"learning_rate": 1.2730442581879721e-08, |
|
"loss": 1.0576, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.9866911427260211, |
|
"grad_norm": 2.478745371368321, |
|
"learning_rate": 1.0790273184164701e-08, |
|
"loss": 1.0713, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.9878384580082606, |
|
"grad_norm": 2.347771751748443, |
|
"learning_rate": 9.010337953185843e-09, |
|
"loss": 1.0794, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 0.9889857732905002, |
|
"grad_norm": 2.3670492207343177, |
|
"learning_rate": 7.390665440393241e-09, |
|
"loss": 1.0731, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.9901330885727397, |
|
"grad_norm": 2.301292219008886, |
|
"learning_rate": 5.931281626508911e-09, |
|
"loss": 1.0511, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 0.9912804038549794, |
|
"grad_norm": 2.3578534915978784, |
|
"learning_rate": 4.632209921107133e-09, |
|
"loss": 1.0602, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.9924277191372189, |
|
"grad_norm": 2.2665225995048828, |
|
"learning_rate": 3.493471162241413e-09, |
|
"loss": 1.0423, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.9935750344194585, |
|
"grad_norm": 2.3787177342094417, |
|
"learning_rate": 2.5150836161058624e-09, |
|
"loss": 1.0538, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.994722349701698, |
|
"grad_norm": 2.308731224538364, |
|
"learning_rate": 1.6970629767465441e-09, |
|
"loss": 1.0852, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 0.9958696649839376, |
|
"grad_norm": 2.402013691289646, |
|
"learning_rate": 1.03942236580723e-09, |
|
"loss": 1.0744, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.9970169802661771, |
|
"grad_norm": 2.4162986481825364, |
|
"learning_rate": 5.421723323195682e-10, |
|
"loss": 1.0759, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 0.9981642955484167, |
|
"grad_norm": 2.445243310213381, |
|
"learning_rate": 2.053208525365502e-10, |
|
"loss": 1.0729, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.9993116108306562, |
|
"grad_norm": 2.3681587132053297, |
|
"learning_rate": 2.8873329798173588e-11, |
|
"loss": 1.0697, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1125566959381104, |
|
"eval_runtime": 852.54, |
|
"eval_samples_per_second": 72.392, |
|
"eval_steps_per_second": 1.132, |
|
"step": 4358 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 4358, |
|
"total_flos": 456238269726720.0, |
|
"train_loss": 1.1632422284009862, |
|
"train_runtime": 29234.3114, |
|
"train_samples_per_second": 19.077, |
|
"train_steps_per_second": 0.149 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4358, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 456238269726720.0, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|