{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 1.4285714285714286e-06, "loss": 1.0455, "step": 1 },
    { "epoch": 0.09, "learning_rate": 7.142857142857143e-06, "loss": 1.0885, "step": 5 },
    { "epoch": 0.18, "learning_rate": 1.4285714285714285e-05, "loss": 0.9453, "step": 10 },
    { "epoch": 0.26, "learning_rate": 2.1428571428571428e-05, "loss": 0.8539, "step": 15 },
    { "epoch": 0.35, "learning_rate": 2.857142857142857e-05, "loss": 0.8017, "step": 20 },
    { "epoch": 0.44, "learning_rate": 3.571428571428572e-05, "loss": 0.7249, "step": 25 },
    { "epoch": 0.53, "learning_rate": 4.2857142857142856e-05, "loss": 0.7551, "step": 30 },
    { "epoch": 0.61, "learning_rate": 5e-05, "loss": 0.8209, "step": 35 },
    { "epoch": 0.7, "learning_rate": 4.998922515567496e-05, "loss": 0.7474, "step": 40 },
    { "epoch": 0.79, "learning_rate": 4.995690991048146e-05, "loss": 0.6913, "step": 45 },
    { "epoch": 0.88, "learning_rate": 4.99030821197584e-05, "loss": 0.6817, "step": 50 },
    { "epoch": 0.96, "learning_rate": 4.982778818239101e-05, "loss": 0.5816, "step": 55 },
    { "epoch": 1.0, "eval_f1": 0.837114288786152, "eval_loss": 0.3865455389022827, "eval_runtime": 0.687, "eval_samples_per_second": 147.026, "eval_steps_per_second": 18.924, "step": 57 },
    { "epoch": 1.05, "learning_rate": 4.97310930008156e-05, "loss": 0.497, "step": 60 },
    { "epoch": 1.14, "learning_rate": 4.961307992507443e-05, "loss": 0.5068, "step": 65 },
    { "epoch": 1.23, "learning_rate": 4.947385068096907e-05, "loss": 0.3726, "step": 70 },
    { "epoch": 1.32, "learning_rate": 4.9313525282373974e-05, "loss": 0.3573, "step": 75 },
    { "epoch": 1.4, "learning_rate": 4.9132241927786035e-05, "loss": 0.2819, "step": 80 },
    { "epoch": 1.49, "learning_rate": 4.893015688119921e-05, "loss": 0.0521, "step": 85 },
    { "epoch": 1.58, "learning_rate": 4.870744433740688e-05, "loss": 0.5087, "step": 90 },
    { "epoch": 1.67, "learning_rate": 4.8464296271848155e-05, "loss": 0.3012, "step": 95 },
    { "epoch": 1.75, "learning_rate": 4.8200922275127355e-05, "loss": 0.4872, "step": 100 },
    { "epoch": 1.84, "learning_rate": 4.7917549372349616e-05, "loss": 0.151, "step": 105 },
    { "epoch": 1.93, "learning_rate": 4.761442182742799e-05, "loss": 0.3685, "step": 110 },
    { "epoch": 2.0, "eval_f1": 0.9324866310160427, "eval_loss": 0.17068691551685333, "eval_runtime": 0.4648, "eval_samples_per_second": 217.297, "eval_steps_per_second": 27.969, "step": 114 },
    { "epoch": 2.02, "learning_rate": 4.7291800932531064e-05, "loss": 0.2192, "step": 115 },
    { "epoch": 2.11, "learning_rate": 4.694996478285231e-05, "loss": 0.2307, "step": 120 },
    { "epoch": 2.19, "learning_rate": 4.6589208036895535e-05, "loss": 0.3376, "step": 125 },
    { "epoch": 2.28, "learning_rate": 4.620984166248288e-05, "loss": 0.1176, "step": 130 },
    { "epoch": 2.37, "learning_rate": 4.581219266870446e-05, "loss": 0.2623, "step": 135 },
    { "epoch": 2.46, "learning_rate": 4.53966038240406e-05, "loss": 0.1813, "step": 140 },
    { "epoch": 2.54, "learning_rate": 4.496343336089965e-05, "loss": 0.1195, "step": 145 },
    { "epoch": 2.63, "learning_rate": 4.4513054666826146e-05, "loss": 0.014, "step": 150 },
    { "epoch": 2.72, "learning_rate": 4.4045855962645363e-05, "loss": 0.1911, "step": 155 },
    { "epoch": 2.81, "learning_rate": 4.3562239967821805e-05, "loss": 0.3593, "step": 160 },
    { "epoch": 2.89, "learning_rate": 4.306262355332006e-05, "loss": 0.1833, "step": 165 },
    { "epoch": 2.98, "learning_rate": 4.254743738226721e-05, "loss": 0.1057, "step": 170 },
    { "epoch": 3.0, "eval_f1": 0.9562770562770562, "eval_loss": 0.09715107828378677, "eval_runtime": 1.8292, "eval_samples_per_second": 55.215, "eval_steps_per_second": 7.107, "step": 171 },
    { "epoch": 3.07, "learning_rate": 4.201712553872658e-05, "loss": 0.1703, "step": 175 },
    { "epoch": 3.16, "learning_rate": 4.147214514490278e-05, "loss": 0.1729, "step": 180 },
    { "epoch": 3.25, "learning_rate": 4.0912965967108125e-05, "loss": 0.0638, "step": 185 },
    { "epoch": 3.33, "learning_rate": 4.034007001082985e-05, "loss": 0.065, "step": 190 },
    { "epoch": 3.42, "learning_rate": 3.975395110524742e-05, "loss": 0.0849, "step": 195 },
    { "epoch": 3.51, "learning_rate": 3.9155114477557933e-05, "loss": 0.0794, "step": 200 },
    { "epoch": 3.6, "learning_rate": 3.854407631747654e-05, "loss": 0.0102, "step": 205 },
    { "epoch": 3.68, "learning_rate": 3.792136333228735e-05, "loss": 0.0812, "step": 210 },
    { "epoch": 3.77, "learning_rate": 3.728751229282836e-05, "loss": 0.173, "step": 215 },
    { "epoch": 3.86, "learning_rate": 3.664306957080159e-05, "loss": 0.1416, "step": 220 },
    { "epoch": 3.95, "learning_rate": 3.598859066780754e-05, "loss": 0.0964, "step": 225 },
    { "epoch": 4.0, "eval_f1": 0.9774955436720143, "eval_loss": 0.142888143658638, "eval_runtime": 0.4809, "eval_samples_per_second": 210.014, "eval_steps_per_second": 27.031, "step": 228 },
    { "epoch": 4.04, "learning_rate": 3.5324639736509714e-05, "loss": 0.1196, "step": 230 },
    { "epoch": 4.12, "learning_rate": 3.4651789094342044e-05, "loss": 0.0048, "step": 235 },
    { "epoch": 4.21, "learning_rate": 3.39706187301784e-05, "loss": 0.0135, "step": 240 },
    { "epoch": 4.3, "learning_rate": 3.3281715804389403e-05, "loss": 0.0931, "step": 245 },
    { "epoch": 4.39, "learning_rate": 3.258567414271748e-05, "loss": 0.0049, "step": 250 },
    { "epoch": 4.47, "learning_rate": 3.18830937244065e-05, "loss": 0.0034, "step": 255 },
    { "epoch": 4.56, "learning_rate": 3.117458016502711e-05, "loss": 0.0118, "step": 260 },
    { "epoch": 4.65, "learning_rate": 3.046074419444366e-05, "loss": 0.0654, "step": 265 },
    { "epoch": 4.74, "learning_rate": 2.9742201130372693e-05, "loss": 0.0013, "step": 270 },
    { "epoch": 4.82, "learning_rate": 2.901957034798671e-05, "loss": 0.1997, "step": 275 },
    { "epoch": 4.91, "learning_rate": 2.8293474746020472e-05, "loss": 0.0035, "step": 280 },
    { "epoch": 5.0, "learning_rate": 2.756454020984009e-05, "loss": 0.1789, "step": 285 },
    { "epoch": 5.0, "eval_f1": 0.9457047629287174, "eval_loss": 0.24929571151733398, "eval_runtime": 0.4598, "eval_samples_per_second": 219.675, "eval_steps_per_second": 28.275, "step": 285 },
    { "epoch": 5.09, "learning_rate": 2.68333950719376e-05, "loss": 0.0928, "step": 290 },
    { "epoch": 5.18, "learning_rate": 2.6100669570316195e-05, "loss": 0.0014, "step": 295 },
    { "epoch": 5.26, "learning_rate": 2.5366995305232916e-05, "loss": 0.0015, "step": 300 },
    { "epoch": 5.35, "learning_rate": 2.463300469476709e-05, "loss": 0.0019, "step": 305 },
    { "epoch": 5.44, "learning_rate": 2.3899330429683807e-05, "loss": 0.0243, "step": 310 },
    { "epoch": 5.53, "learning_rate": 2.3166604928062406e-05, "loss": 0.0476, "step": 315 },
    { "epoch": 5.61, "learning_rate": 2.243545979015992e-05, "loss": 0.0026, "step": 320 },
    { "epoch": 5.7, "learning_rate": 2.1706525253979534e-05, "loss": 0.0404, "step": 325 },
    { "epoch": 5.79, "learning_rate": 2.0980429652013297e-05, "loss": 0.0814, "step": 330 },
    { "epoch": 5.88, "learning_rate": 2.025779886962731e-05, "loss": 0.0722, "step": 335 },
    { "epoch": 5.96, "learning_rate": 1.9539255805556344e-05, "loss": 0.0016, "step": 340 },
    { "epoch": 6.0, "eval_f1": 0.634896401308615, "eval_loss": 0.18999898433685303, "eval_runtime": 0.4683, "eval_samples_per_second": 215.658, "eval_steps_per_second": 27.758, "step": 342 },
    { "epoch": 6.05, "learning_rate": 1.8825419834972902e-05, "loss": 0.0021, "step": 345 },
    { "epoch": 6.14, "learning_rate": 1.811690627559351e-05, "loss": 0.0019, "step": 350 },
    { "epoch": 6.23, "learning_rate": 1.7414325857282526e-05, "loss": 0.0016, "step": 355 },
    { "epoch": 6.32, "learning_rate": 1.6718284195610606e-05, "loss": 0.0787, "step": 360 },
    { "epoch": 6.4, "learning_rate": 1.6029381269821604e-05, "loss": 0.0018, "step": 365 },
    { "epoch": 6.49, "learning_rate": 1.534821090565796e-05, "loss": 0.0014, "step": 370 },
    { "epoch": 6.58, "learning_rate": 1.4675360263490295e-05, "loss": 0.0969, "step": 375 },
    { "epoch": 6.67, "learning_rate": 1.4011409332192472e-05, "loss": 0.0013, "step": 380 },
    { "epoch": 6.75, "learning_rate": 1.335693042919841e-05, "loss": 0.059, "step": 385 },
    { "epoch": 6.84, "learning_rate": 1.2712487707171645e-05, "loss": 0.0015, "step": 390 },
    { "epoch": 6.93, "learning_rate": 1.2078636667712649e-05, "loss": 0.0013, "step": 395 },
    { "epoch": 7.0, "eval_f1": 0.9562770562770562, "eval_loss": 0.20598751306533813, "eval_runtime": 0.4544, "eval_samples_per_second": 222.287, "eval_steps_per_second": 28.611, "step": 399 },
    { "epoch": 7.02, "learning_rate": 1.1455923682523475e-05, "loss": 0.0016, "step": 400 },
    { "epoch": 7.11, "learning_rate": 1.0844885522442074e-05, "loss": 0.0017, "step": 405 },
    { "epoch": 7.19, "learning_rate": 1.0246048894752589e-05, "loss": 0.0011, "step": 410 },
    { "epoch": 7.28, "learning_rate": 9.659929989170154e-06, "loss": 0.0011, "step": 415 },
    { "epoch": 7.37, "learning_rate": 9.087034032891883e-06, "loss": 0.0012, "step": 420 },
    { "epoch": 7.46, "learning_rate": 8.527854855097225e-06, "loss": 0.0009, "step": 425 },
    { "epoch": 7.54, "learning_rate": 7.982874461273438e-06, "loss": 0.0009, "step": 430 },
    { "epoch": 7.63, "learning_rate": 7.452562617732794e-06, "loss": 0.0009, "step": 435 },
    { "epoch": 7.72, "learning_rate": 6.93737644667995e-06, "loss": 0.0008, "step": 440 },
    { "epoch": 7.81, "learning_rate": 6.4377600321782e-06, "loss": 0.0008, "step": 445 },
    { "epoch": 7.89, "learning_rate": 5.954144037354645e-06, "loss": 0.0009, "step": 450 },
    { "epoch": 7.98, "learning_rate": 5.486945333173851e-06, "loss": 0.0008, "step": 455 },
    { "epoch": 8.0, "eval_f1": 0.9562770562770562, "eval_loss": 0.2320912480354309, "eval_runtime": 0.4633, "eval_samples_per_second": 218.013, "eval_steps_per_second": 28.061, "step": 456 },
    { "epoch": 8.07, "learning_rate": 5.036566639100351e-06, "loss": 0.0009, "step": 460 },
    { "epoch": 8.16, "learning_rate": 4.603396175959404e-06, "loss": 0.0008, "step": 465 },
    { "epoch": 8.25, "learning_rate": 4.187807331295549e-06, "loss": 0.0007, "step": 470 },
    { "epoch": 8.33, "learning_rate": 3.7901583375171273e-06, "loss": 0.0008, "step": 475 },
    { "epoch": 8.42, "learning_rate": 3.4107919631044732e-06, "loss": 0.0006, "step": 480 },
    { "epoch": 8.51, "learning_rate": 3.0500352171476897e-06, "loss": 0.0007, "step": 485 },
    { "epoch": 8.6, "learning_rate": 2.708199067468939e-06, "loss": 0.0006, "step": 490 },
    { "epoch": 8.68, "learning_rate": 2.385578172572009e-06, "loss": 0.0007, "step": 495 },
    { "epoch": 8.77, "learning_rate": 2.0824506276503897e-06, "loss": 0.0007, "step": 500 },
    { "epoch": 8.86, "learning_rate": 1.7990777248726442e-06, "loss": 0.0007, "step": 505 },
    { "epoch": 8.95, "learning_rate": 1.5357037281518522e-06, "loss": 0.0006, "step": 510 },
    { "epoch": 9.0, "eval_f1": 0.9562770562770562, "eval_loss": 0.24122363328933716, "eval_runtime": 0.4601, "eval_samples_per_second": 219.509, "eval_steps_per_second": 28.254, "step": 513 },
    { "epoch": 9.04, "learning_rate": 1.2925556625931173e-06, "loss": 0.0007, "step": 515 },
    { "epoch": 9.12, "learning_rate": 1.0698431188007952e-06, "loss": 0.0005, "step": 520 },
    { "epoch": 9.21, "learning_rate": 8.677580722139672e-07, "loss": 0.0006, "step": 525 },
    { "epoch": 9.3, "learning_rate": 6.864747176260289e-07, "loss": 0.0007, "step": 530 },
    { "epoch": 9.39, "learning_rate": 5.261493190309303e-07, "loss": 0.0007, "step": 535 },
    { "epoch": 9.47, "learning_rate": 3.8692007492557024e-07, "loss": 0.0006, "step": 540 },
    { "epoch": 9.56, "learning_rate": 2.6890699918440676e-07, "loss": 0.0007, "step": 545 },
    { "epoch": 9.65, "learning_rate": 1.7221181760899152e-07, "loss": 0.0006, "step": 550 },
    { "epoch": 9.74, "learning_rate": 9.691788024160376e-08, "loss": 0.0006, "step": 555 },
    { "epoch": 9.82, "learning_rate": 4.3090089518540987e-08, "loss": 0.0007, "step": 560 },
    { "epoch": 9.91, "learning_rate": 1.0774844325039946e-08, "loss": 0.0007, "step": 565 },
    { "epoch": 10.0, "learning_rate": 0.0, "loss": 0.0006, "step": 570 },
    { "epoch": 10.0, "eval_f1": 0.9562770562770562, "eval_loss": 0.2428739070892334, "eval_runtime": 1.7956, "eval_samples_per_second": 56.247, "eval_steps_per_second": 7.24, "step": 570 },
    { "epoch": 10.0, "step": 570, "total_flos": 200235477611736.0, "train_loss": 0.15238243901587434, "train_runtime": 195.4639, "train_samples_per_second": 46.454, "train_steps_per_second": 2.916 }
  ],
  "max_steps": 570,
  "num_train_epochs": 10,
  "total_flos": 200235477611736.0,
  "trial_name": null,
  "trial_params": null
}