{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1857,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005385029617662897,
      "grad_norm": 22.7613713160104,
      "learning_rate": 5.376344086021506e-08,
      "loss": 1.3472,
      "step": 1
    },
    {
      "epoch": 0.0026925148088314485,
      "grad_norm": 22.37683485347051,
      "learning_rate": 2.688172043010753e-07,
      "loss": 1.334,
      "step": 5
    },
    {
      "epoch": 0.005385029617662897,
      "grad_norm": 16.198717013287386,
      "learning_rate": 5.376344086021506e-07,
      "loss": 1.3098,
      "step": 10
    },
    {
      "epoch": 0.008077544426494346,
      "grad_norm": 12.105460048613201,
      "learning_rate": 8.064516129032258e-07,
      "loss": 1.1643,
      "step": 15
    },
    {
      "epoch": 0.010770059235325794,
      "grad_norm": 7.514844224376465,
      "learning_rate": 1.0752688172043011e-06,
      "loss": 1.0474,
      "step": 20
    },
    {
      "epoch": 0.013462574044157244,
      "grad_norm": 3.8632002795574367,
      "learning_rate": 1.3440860215053765e-06,
      "loss": 0.9317,
      "step": 25
    },
    {
      "epoch": 0.01615508885298869,
      "grad_norm": 3.400445342173989,
      "learning_rate": 1.6129032258064516e-06,
      "loss": 0.8826,
      "step": 30
    },
    {
      "epoch": 0.01884760366182014,
      "grad_norm": 2.9898366351100547,
      "learning_rate": 1.881720430107527e-06,
      "loss": 0.8635,
      "step": 35
    },
    {
      "epoch": 0.021540118470651588,
      "grad_norm": 2.8585969196090373,
      "learning_rate": 2.1505376344086023e-06,
      "loss": 0.8577,
      "step": 40
    },
    {
      "epoch": 0.024232633279483037,
      "grad_norm": 3.0122772432907277,
      "learning_rate": 2.4193548387096776e-06,
      "loss": 0.8418,
      "step": 45
    },
    {
      "epoch": 0.026925148088314487,
      "grad_norm": 3.0903177647257336,
      "learning_rate": 2.688172043010753e-06,
      "loss": 0.8311,
      "step": 50
    },
    {
      "epoch": 0.029617662897145933,
      "grad_norm": 3.0109701116495216,
      "learning_rate": 2.9569892473118283e-06,
      "loss": 0.8072,
      "step": 55
    },
    {
      "epoch": 0.03231017770597738,
      "grad_norm": 2.9583394058653876,
      "learning_rate": 3.225806451612903e-06,
      "loss": 0.7965,
      "step": 60
    },
    {
      "epoch": 0.03500269251480883,
      "grad_norm": 3.1460179996258146,
      "learning_rate": 3.494623655913979e-06,
      "loss": 0.8118,
      "step": 65
    },
    {
      "epoch": 0.03769520732364028,
      "grad_norm": 3.376763980257778,
      "learning_rate": 3.763440860215054e-06,
      "loss": 0.7851,
      "step": 70
    },
    {
      "epoch": 0.04038772213247173,
      "grad_norm": 2.9673425200438013,
      "learning_rate": 4.032258064516129e-06,
      "loss": 0.7957,
      "step": 75
    },
    {
      "epoch": 0.043080236941303175,
      "grad_norm": 3.0128704990376267,
      "learning_rate": 4.3010752688172045e-06,
      "loss": 0.7751,
      "step": 80
    },
    {
      "epoch": 0.04577275175013463,
      "grad_norm": 3.3210132634127656,
      "learning_rate": 4.56989247311828e-06,
      "loss": 0.7689,
      "step": 85
    },
    {
      "epoch": 0.048465266558966075,
      "grad_norm": 3.175041603297429,
      "learning_rate": 4.838709677419355e-06,
      "loss": 0.7685,
      "step": 90
    },
    {
      "epoch": 0.05115778136779752,
      "grad_norm": 3.1666439879487838,
      "learning_rate": 5.1075268817204305e-06,
      "loss": 0.7642,
      "step": 95
    },
    {
      "epoch": 0.053850296176628974,
      "grad_norm": 3.050039288877326,
      "learning_rate": 5.376344086021506e-06,
      "loss": 0.75,
      "step": 100
    },
    {
      "epoch": 0.05654281098546042,
      "grad_norm": 2.9472819168277455,
      "learning_rate": 5.645161290322582e-06,
      "loss": 0.7437,
      "step": 105
    },
    {
      "epoch": 0.05923532579429187,
      "grad_norm": 3.1420868913301185,
      "learning_rate": 5.9139784946236566e-06,
      "loss": 0.7317,
      "step": 110
    },
    {
      "epoch": 0.06192784060312332,
      "grad_norm": 3.2485163047441423,
      "learning_rate": 6.182795698924732e-06,
      "loss": 0.7392,
      "step": 115
    },
    {
      "epoch": 0.06462035541195477,
      "grad_norm": 3.4854700395087552,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.7441,
      "step": 120
    },
    {
      "epoch": 0.06731287022078622,
      "grad_norm": 2.9119403460101148,
      "learning_rate": 6.720430107526882e-06,
      "loss": 0.7216,
      "step": 125
    },
    {
      "epoch": 0.07000538502961766,
      "grad_norm": 2.956926563215588,
      "learning_rate": 6.989247311827958e-06,
      "loss": 0.7283,
      "step": 130
    },
    {
      "epoch": 0.07269789983844911,
      "grad_norm": 3.256266278148656,
      "learning_rate": 7.258064516129033e-06,
      "loss": 0.7296,
      "step": 135
    },
    {
      "epoch": 0.07539041464728057,
      "grad_norm": 2.8692943178965185,
      "learning_rate": 7.526881720430108e-06,
      "loss": 0.7202,
      "step": 140
    },
    {
      "epoch": 0.078082929456112,
      "grad_norm": 2.8804161059666935,
      "learning_rate": 7.795698924731183e-06,
      "loss": 0.7195,
      "step": 145
    },
    {
      "epoch": 0.08077544426494346,
      "grad_norm": 3.0006510283335848,
      "learning_rate": 8.064516129032258e-06,
      "loss": 0.721,
      "step": 150
    },
    {
      "epoch": 0.08346795907377491,
      "grad_norm": 2.978282671191466,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.7151,
      "step": 155
    },
    {
      "epoch": 0.08616047388260635,
      "grad_norm": 2.832558314575903,
      "learning_rate": 8.602150537634409e-06,
      "loss": 0.712,
      "step": 160
    },
    {
      "epoch": 0.0888529886914378,
      "grad_norm": 2.9438520239067265,
      "learning_rate": 8.870967741935484e-06,
      "loss": 0.7161,
      "step": 165
    },
    {
      "epoch": 0.09154550350026926,
      "grad_norm": 3.165458696824085,
      "learning_rate": 9.13978494623656e-06,
      "loss": 0.7146,
      "step": 170
    },
    {
      "epoch": 0.0942380183091007,
      "grad_norm": 3.372895429211009,
      "learning_rate": 9.408602150537635e-06,
      "loss": 0.7008,
      "step": 175
    },
    {
      "epoch": 0.09693053311793215,
      "grad_norm": 2.9822482771935785,
      "learning_rate": 9.67741935483871e-06,
      "loss": 0.702,
      "step": 180
    },
    {
      "epoch": 0.0996230479267636,
      "grad_norm": 2.7297175029131555,
      "learning_rate": 9.946236559139786e-06,
      "loss": 0.701,
      "step": 185
    },
    {
      "epoch": 0.10231556273559504,
      "grad_norm": 2.797512257800779,
      "learning_rate": 9.999858614526654e-06,
      "loss": 0.7073,
      "step": 190
    },
    {
      "epoch": 0.1050080775444265,
      "grad_norm": 2.7494639915648516,
      "learning_rate": 9.999284249745143e-06,
      "loss": 0.6922,
      "step": 195
    },
    {
      "epoch": 0.10770059235325795,
      "grad_norm": 2.8999873055676404,
      "learning_rate": 9.998268119778471e-06,
      "loss": 0.6998,
      "step": 200
    },
    {
      "epoch": 0.11039310716208939,
      "grad_norm": 2.709033463381412,
      "learning_rate": 9.996810314417666e-06,
      "loss": 0.7017,
      "step": 205
    },
    {
      "epoch": 0.11308562197092084,
      "grad_norm": 2.6497064978122498,
      "learning_rate": 9.9949109624827e-06,
      "loss": 0.697,
      "step": 210
    },
    {
      "epoch": 0.1157781367797523,
      "grad_norm": 2.850561924743422,
      "learning_rate": 9.992570231811108e-06,
      "loss": 0.6937,
      "step": 215
    },
    {
      "epoch": 0.11847065158858373,
      "grad_norm": 2.7049716604581953,
      "learning_rate": 9.989788329243163e-06,
      "loss": 0.7057,
      "step": 220
    },
    {
      "epoch": 0.12116316639741519,
      "grad_norm": 2.5917851309791673,
      "learning_rate": 9.986565500603598e-06,
      "loss": 0.6956,
      "step": 225
    },
    {
      "epoch": 0.12385568120624664,
      "grad_norm": 2.6711850256792258,
      "learning_rate": 9.982902030679875e-06,
      "loss": 0.7109,
      "step": 230
    },
    {
      "epoch": 0.12654819601507808,
      "grad_norm": 2.6766440274545995,
      "learning_rate": 9.978798243197031e-06,
      "loss": 0.6909,
      "step": 235
    },
    {
      "epoch": 0.12924071082390953,
      "grad_norm": 2.8552384364271775,
      "learning_rate": 9.974254500789065e-06,
      "loss": 0.6879,
      "step": 240
    },
    {
      "epoch": 0.13193322563274099,
      "grad_norm": 2.5756385146469882,
      "learning_rate": 9.969271204966895e-06,
      "loss": 0.6802,
      "step": 245
    },
    {
      "epoch": 0.13462574044157244,
      "grad_norm": 2.717537618462663,
      "learning_rate": 9.963848796082873e-06,
      "loss": 0.6921,
      "step": 250
    },
    {
      "epoch": 0.13731825525040386,
      "grad_norm": 2.5131980662996756,
      "learning_rate": 9.95798775329189e-06,
      "loss": 0.689,
      "step": 255
    },
    {
      "epoch": 0.14001077005923532,
      "grad_norm": 2.51836898271677,
      "learning_rate": 9.951688594509012e-06,
      "loss": 0.6793,
      "step": 260
    },
    {
      "epoch": 0.14270328486806677,
      "grad_norm": 2.6980273572649676,
      "learning_rate": 9.944951876363731e-06,
      "loss": 0.6834,
      "step": 265
    },
    {
      "epoch": 0.14539579967689822,
      "grad_norm": 2.646134452091696,
      "learning_rate": 9.937778194150771e-06,
      "loss": 0.6767,
      "step": 270
    },
    {
      "epoch": 0.14808831448572968,
      "grad_norm": 2.5534608799808627,
      "learning_rate": 9.930168181777486e-06,
      "loss": 0.6907,
      "step": 275
    },
    {
      "epoch": 0.15078082929456113,
      "grad_norm": 2.5908475307902656,
      "learning_rate": 9.922122511707843e-06,
      "loss": 0.6861,
      "step": 280
    },
    {
      "epoch": 0.15347334410339256,
      "grad_norm": 2.5765130409804695,
      "learning_rate": 9.913641894903006e-06,
      "loss": 0.6791,
      "step": 285
    },
    {
      "epoch": 0.156165858912224,
      "grad_norm": 2.741146958946548,
      "learning_rate": 9.904727080758495e-06,
      "loss": 0.6874,
      "step": 290
    },
    {
      "epoch": 0.15885837372105546,
      "grad_norm": 2.8381749428470595,
      "learning_rate": 9.895378857037989e-06,
      "loss": 0.676,
      "step": 295
    },
    {
      "epoch": 0.16155088852988692,
      "grad_norm": 2.7204348331914447,
      "learning_rate": 9.885598049803693e-06,
      "loss": 0.673,
      "step": 300
    },
    {
      "epoch": 0.16424340333871837,
      "grad_norm": 2.6441432061304178,
      "learning_rate": 9.875385523343353e-06,
      "loss": 0.671,
      "step": 305
    },
    {
      "epoch": 0.16693591814754982,
      "grad_norm": 2.5486118216678277,
      "learning_rate": 9.86474218009388e-06,
      "loss": 0.6622,
      "step": 310
    },
    {
      "epoch": 0.16962843295638125,
      "grad_norm": 2.448370238657585,
      "learning_rate": 9.853668960561611e-06,
      "loss": 0.6848,
      "step": 315
    },
    {
      "epoch": 0.1723209477652127,
      "grad_norm": 2.5840299111800955,
      "learning_rate": 9.84216684323919e-06,
      "loss": 0.6568,
      "step": 320
    },
    {
      "epoch": 0.17501346257404415,
      "grad_norm": 2.57305271825157,
      "learning_rate": 9.830236844519114e-06,
      "loss": 0.66,
      "step": 325
    },
    {
      "epoch": 0.1777059773828756,
      "grad_norm": 2.53266567006045,
      "learning_rate": 9.81788001860391e-06,
      "loss": 0.662,
      "step": 330
    },
    {
      "epoch": 0.18039849219170706,
      "grad_norm": 2.4978159844537458,
      "learning_rate": 9.805097457412981e-06,
      "loss": 0.6509,
      "step": 335
    },
    {
      "epoch": 0.1830910070005385,
      "grad_norm": 2.533762872533261,
      "learning_rate": 9.791890290486127e-06,
      "loss": 0.6336,
      "step": 340
    },
    {
      "epoch": 0.18578352180936994,
      "grad_norm": 2.5709499517182888,
      "learning_rate": 9.77825968488372e-06,
      "loss": 0.6672,
      "step": 345
    },
    {
      "epoch": 0.1884760366182014,
      "grad_norm": 2.5155435537189432,
      "learning_rate": 9.76420684508358e-06,
      "loss": 0.6587,
      "step": 350
    },
    {
      "epoch": 0.19116855142703285,
      "grad_norm": 2.3629608947394214,
      "learning_rate": 9.74973301287455e-06,
      "loss": 0.6382,
      "step": 355
    },
    {
      "epoch": 0.1938610662358643,
      "grad_norm": 2.423886375936418,
      "learning_rate": 9.734839467246744e-06,
      "loss": 0.6399,
      "step": 360
    },
    {
      "epoch": 0.19655358104469575,
      "grad_norm": 2.4151592067478713,
      "learning_rate": 9.719527524278552e-06,
      "loss": 0.6567,
      "step": 365
    },
    {
      "epoch": 0.1992460958535272,
      "grad_norm": 2.339311457038808,
      "learning_rate": 9.703798537020327e-06,
      "loss": 0.6329,
      "step": 370
    },
    {
      "epoch": 0.20193861066235863,
      "grad_norm": 2.401955911603551,
      "learning_rate": 9.687653895374824e-06,
      "loss": 0.6443,
      "step": 375
    },
    {
      "epoch": 0.20463112547119008,
      "grad_norm": 2.349530919733858,
      "learning_rate": 9.671095025974388e-06,
      "loss": 0.6262,
      "step": 380
    },
    {
      "epoch": 0.20732364028002154,
      "grad_norm": 2.5574771679537,
      "learning_rate": 9.654123392054884e-06,
      "loss": 0.6493,
      "step": 385
    },
    {
      "epoch": 0.210016155088853,
      "grad_norm": 2.5279142423497443,
      "learning_rate": 9.636740493326398e-06,
      "loss": 0.6458,
      "step": 390
    },
    {
      "epoch": 0.21270866989768444,
      "grad_norm": 2.407565936871044,
      "learning_rate": 9.618947865840705e-06,
      "loss": 0.6356,
      "step": 395
    },
    {
      "epoch": 0.2154011847065159,
      "grad_norm": 2.50737386034651,
      "learning_rate": 9.600747081855553e-06,
      "loss": 0.6505,
      "step": 400
    },
    {
      "epoch": 0.21809369951534732,
      "grad_norm": 2.492501588167292,
      "learning_rate": 9.582139749695713e-06,
      "loss": 0.625,
      "step": 405
    },
    {
      "epoch": 0.22078621432417878,
      "grad_norm": 2.3085166355816322,
      "learning_rate": 9.563127513610868e-06,
      "loss": 0.6531,
      "step": 410
    },
    {
      "epoch": 0.22347872913301023,
      "grad_norm": 2.3663625111521553,
      "learning_rate": 9.543712053630314e-06,
      "loss": 0.6185,
      "step": 415
    },
    {
      "epoch": 0.22617124394184168,
      "grad_norm": 2.362289550568963,
      "learning_rate": 9.523895085414503e-06,
      "loss": 0.6151,
      "step": 420
    },
    {
      "epoch": 0.22886375875067314,
      "grad_norm": 2.4285748744254394,
      "learning_rate": 9.503678360103433e-06,
      "loss": 0.6195,
      "step": 425
    },
    {
      "epoch": 0.2315562735595046,
      "grad_norm": 2.320249188264771,
      "learning_rate": 9.483063664161922e-06,
      "loss": 0.6217,
      "step": 430
    },
    {
      "epoch": 0.23424878836833601,
      "grad_norm": 2.4068743254230953,
      "learning_rate": 9.462052819221726e-06,
      "loss": 0.6269,
      "step": 435
    },
    {
      "epoch": 0.23694130317716747,
      "grad_norm": 2.443692426314357,
      "learning_rate": 9.440647681920587e-06,
      "loss": 0.6414,
      "step": 440
    },
    {
      "epoch": 0.23963381798599892,
      "grad_norm": 2.376216504990232,
      "learning_rate": 9.418850143738158e-06,
      "loss": 0.6349,
      "step": 445
    },
    {
      "epoch": 0.24232633279483037,
      "grad_norm": 2.4206153603989566,
      "learning_rate": 9.396662130828869e-06,
      "loss": 0.6059,
      "step": 450
    },
    {
      "epoch": 0.24501884760366183,
      "grad_norm": 2.4419798835688074,
      "learning_rate": 9.374085603851709e-06,
      "loss": 0.6091,
      "step": 455
    },
    {
      "epoch": 0.24771136241249328,
      "grad_norm": 2.3263506335508817,
      "learning_rate": 9.351122557796987e-06,
      "loss": 0.6188,
      "step": 460
    },
    {
      "epoch": 0.25040387722132473,
      "grad_norm": 2.533382037846899,
      "learning_rate": 9.327775021810037e-06,
      "loss": 0.6145,
      "step": 465
    },
    {
      "epoch": 0.25309639203015616,
      "grad_norm": 2.678311932146126,
      "learning_rate": 9.304045059011905e-06,
      "loss": 0.5881,
      "step": 470
    },
    {
      "epoch": 0.25578890683898764,
      "grad_norm": 2.5127273745887466,
      "learning_rate": 9.279934766317045e-06,
      "loss": 0.621,
      "step": 475
    },
    {
      "epoch": 0.25848142164781907,
      "grad_norm": 2.303317205670643,
      "learning_rate": 9.255446274248023e-06,
      "loss": 0.6036,
      "step": 480
    },
    {
      "epoch": 0.2611739364566505,
      "grad_norm": 2.4449467721888376,
      "learning_rate": 9.230581746747257e-06,
      "loss": 0.6035,
      "step": 485
    },
    {
      "epoch": 0.26386645126548197,
      "grad_norm": 2.4020663292502165,
      "learning_rate": 9.205343380985793e-06,
      "loss": 0.5939,
      "step": 490
    },
    {
      "epoch": 0.2665589660743134,
      "grad_norm": 2.526561720865294,
      "learning_rate": 9.179733407169146e-06,
      "loss": 0.6,
      "step": 495
    },
    {
      "epoch": 0.2692514808831449,
      "grad_norm": 2.4642667203514783,
      "learning_rate": 9.153754088340237e-06,
      "loss": 0.6143,
      "step": 500
    },
    {
      "epoch": 0.2719439956919763,
      "grad_norm": 2.546297508506232,
      "learning_rate": 9.12740772017941e-06,
      "loss": 0.6006,
      "step": 505
    },
    {
      "epoch": 0.27463651050080773,
      "grad_norm": 2.332663269345773,
      "learning_rate": 9.10069663080158e-06,
      "loss": 0.5894,
      "step": 510
    },
    {
      "epoch": 0.2773290253096392,
      "grad_norm": 2.414500443877397,
      "learning_rate": 9.073623180550496e-06,
      "loss": 0.5978,
      "step": 515
    },
    {
      "epoch": 0.28002154011847064,
      "grad_norm": 2.618196788357666,
      "learning_rate": 9.046189761790179e-06,
      "loss": 0.5914,
      "step": 520
    },
    {
      "epoch": 0.2827140549273021,
      "grad_norm": 2.4760448307330964,
      "learning_rate": 9.018398798693512e-06,
      "loss": 0.6094,
      "step": 525
    },
    {
      "epoch": 0.28540656973613354,
      "grad_norm": 2.403492095012741,
      "learning_rate": 8.99025274702803e-06,
      "loss": 0.5792,
      "step": 530
    },
    {
      "epoch": 0.288099084544965,
      "grad_norm": 2.424533931438387,
      "learning_rate": 8.961754093938917e-06,
      "loss": 0.5813,
      "step": 535
    },
    {
      "epoch": 0.29079159935379645,
      "grad_norm": 2.4025980246295555,
      "learning_rate": 8.932905357729213e-06,
      "loss": 0.5747,
      "step": 540
    },
    {
      "epoch": 0.2934841141626279,
      "grad_norm": 2.4566763739595343,
      "learning_rate": 8.903709087637307e-06,
      "loss": 0.6003,
      "step": 545
    },
    {
      "epoch": 0.29617662897145935,
      "grad_norm": 2.307834128807205,
      "learning_rate": 8.874167863611647e-06,
      "loss": 0.5838,
      "step": 550
    },
    {
      "epoch": 0.2988691437802908,
      "grad_norm": 2.352469567943909,
      "learning_rate": 8.844284296082776e-06,
      "loss": 0.5845,
      "step": 555
    },
    {
      "epoch": 0.30156165858912226,
      "grad_norm": 2.3341023442033895,
      "learning_rate": 8.81406102573266e-06,
      "loss": 0.5689,
      "step": 560
    },
    {
      "epoch": 0.3042541733979537,
      "grad_norm": 2.4151388881341322,
      "learning_rate": 8.783500723261333e-06,
      "loss": 0.5793,
      "step": 565
    },
    {
      "epoch": 0.3069466882067851,
      "grad_norm": 2.352881741468682,
      "learning_rate": 8.752606089150903e-06,
      "loss": 0.5872,
      "step": 570
    },
    {
      "epoch": 0.3096392030156166,
      "grad_norm": 2.2929845371154665,
      "learning_rate": 8.721379853426928e-06,
      "loss": 0.5827,
      "step": 575
    },
    {
      "epoch": 0.312331717824448,
      "grad_norm": 2.351649375339974,
      "learning_rate": 8.689824775417164e-06,
      "loss": 0.5704,
      "step": 580
    },
    {
      "epoch": 0.3150242326332795,
      "grad_norm": 2.3536783488717425,
      "learning_rate": 8.657943643507747e-06,
      "loss": 0.5916,
      "step": 585
    },
    {
      "epoch": 0.3177167474421109,
      "grad_norm": 2.4858567387352783,
      "learning_rate": 8.625739274896785e-06,
      "loss": 0.5679,
      "step": 590
    },
    {
      "epoch": 0.3204092622509424,
      "grad_norm": 2.5420824976461613,
      "learning_rate": 8.593214515345426e-06,
      "loss": 0.5747,
      "step": 595
    },
    {
      "epoch": 0.32310177705977383,
      "grad_norm": 2.411084856535756,
      "learning_rate": 8.560372238926372e-06,
      "loss": 0.5818,
      "step": 600
    },
    {
      "epoch": 0.32579429186860526,
      "grad_norm": 2.493451871387303,
      "learning_rate": 8.527215347769932e-06,
      "loss": 0.5709,
      "step": 605
    },
    {
      "epoch": 0.32848680667743674,
      "grad_norm": 2.2974929968587388,
      "learning_rate": 8.493746771807559e-06,
      "loss": 0.578,
      "step": 610
    },
    {
      "epoch": 0.33117932148626816,
      "grad_norm": 2.368635502736958,
      "learning_rate": 8.459969468512943e-06,
      "loss": 0.5507,
      "step": 615
    },
    {
      "epoch": 0.33387183629509964,
      "grad_norm": 2.5093780273624384,
      "learning_rate": 8.425886422640684e-06,
      "loss": 0.5574,
      "step": 620
    },
    {
      "epoch": 0.33656435110393107,
      "grad_norm": 2.3784956807998254,
      "learning_rate": 8.391500645962527e-06,
      "loss": 0.5629,
      "step": 625
    },
    {
      "epoch": 0.3392568659127625,
      "grad_norm": 2.5705211303648987,
      "learning_rate": 8.356815177001243e-06,
      "loss": 0.5563,
      "step": 630
    },
    {
      "epoch": 0.341949380721594,
      "grad_norm": 2.296265064615366,
      "learning_rate": 8.321833080762109e-06,
      "loss": 0.5673,
      "step": 635
    },
    {
      "epoch": 0.3446418955304254,
      "grad_norm": 2.468686941019328,
      "learning_rate": 8.286557448462074e-06,
      "loss": 0.5615,
      "step": 640
    },
    {
      "epoch": 0.3473344103392569,
      "grad_norm": 2.304658882065399,
      "learning_rate": 8.250991397256609e-06,
      "loss": 0.5716,
      "step": 645
    },
    {
      "epoch": 0.3500269251480883,
      "grad_norm": 2.4328147271748968,
      "learning_rate": 8.215138069964248e-06,
      "loss": 0.5497,
      "step": 650
    },
    {
      "epoch": 0.3527194399569198,
      "grad_norm": 2.5638444164193626,
      "learning_rate": 8.179000634788872e-06,
      "loss": 0.5634,
      "step": 655
    },
    {
      "epoch": 0.3554119547657512,
      "grad_norm": 2.304000568970403,
      "learning_rate": 8.142582285039753e-06,
      "loss": 0.5552,
      "step": 660
    },
    {
      "epoch": 0.35810446957458264,
      "grad_norm": 2.312118564540539,
      "learning_rate": 8.105886238849369e-06,
      "loss": 0.5665,
      "step": 665
    },
    {
      "epoch": 0.3607969843834141,
      "grad_norm": 2.564926504630545,
      "learning_rate": 8.068915738889038e-06,
      "loss": 0.5432,
      "step": 670
    },
    {
      "epoch": 0.36348949919224555,
      "grad_norm": 2.552416968513296,
      "learning_rate": 8.031674052082372e-06,
      "loss": 0.5418,
      "step": 675
    },
    {
      "epoch": 0.366182014001077,
      "grad_norm": 2.465276318426562,
      "learning_rate": 7.994164469316596e-06,
      "loss": 0.5438,
      "step": 680
    },
    {
      "epoch": 0.36887452880990845,
      "grad_norm": 2.3946509022495728,
      "learning_rate": 7.956390305151744e-06,
      "loss": 0.5399,
      "step": 685
    },
    {
      "epoch": 0.3715670436187399,
      "grad_norm": 2.5006069528524266,
      "learning_rate": 7.918354897527767e-06,
      "loss": 0.5556,
      "step": 690
    },
    {
      "epoch": 0.37425955842757136,
      "grad_norm": 2.4646542978549633,
      "learning_rate": 7.880061607469574e-06,
      "loss": 0.5465,
      "step": 695
    },
    {
      "epoch": 0.3769520732364028,
      "grad_norm": 2.464786801902601,
      "learning_rate": 7.841513818790035e-06,
      "loss": 0.5412,
      "step": 700
    },
    {
      "epoch": 0.37964458804523427,
      "grad_norm": 2.470119700617401,
      "learning_rate": 7.80271493779096e-06,
      "loss": 0.5518,
      "step": 705
    },
    {
      "epoch": 0.3823371028540657,
      "grad_norm": 2.487807822690958,
      "learning_rate": 7.763668392962107e-06,
      "loss": 0.539,
      "step": 710
    },
    {
      "epoch": 0.3850296176628972,
      "grad_norm": 2.3449768254321812,
      "learning_rate": 7.724377634678222e-06,
      "loss": 0.5367,
      "step": 715
    },
    {
      "epoch": 0.3877221324717286,
      "grad_norm": 2.342137329316273,
      "learning_rate": 7.684846134894133e-06,
      "loss": 0.5361,
      "step": 720
    },
    {
      "epoch": 0.39041464728056,
      "grad_norm": 2.3478632374387236,
      "learning_rate": 7.645077386837963e-06,
      "loss": 0.5377,
      "step": 725
    },
    {
      "epoch": 0.3931071620893915,
      "grad_norm": 2.311127470477535,
      "learning_rate": 7.605074904702438e-06,
      "loss": 0.5326,
      "step": 730
    },
    {
      "epoch": 0.39579967689822293,
      "grad_norm": 2.5257265487257317,
      "learning_rate": 7.5648422233343564e-06,
      "loss": 0.5253,
      "step": 735
    },
    {
      "epoch": 0.3984921917070544,
      "grad_norm": 2.2883296027896862,
      "learning_rate": 7.524382897922229e-06,
      "loss": 0.5198,
      "step": 740
    },
    {
      "epoch": 0.40118470651588584,
      "grad_norm": 2.2938224585559994,
      "learning_rate": 7.483700503682127e-06,
      "loss": 0.5179,
      "step": 745
    },
    {
      "epoch": 0.40387722132471726,
      "grad_norm": 2.351764729142054,
      "learning_rate": 7.442798635541749e-06,
      "loss": 0.5245,
      "step": 750
    },
    {
      "epoch": 0.40656973613354874,
      "grad_norm": 2.2446519389708057,
      "learning_rate": 7.401680907822756e-06,
      "loss": 0.5356,
      "step": 755
    },
    {
      "epoch": 0.40926225094238017,
      "grad_norm": 2.463243359027525,
      "learning_rate": 7.360350953921386e-06,
      "loss": 0.5261,
      "step": 760
    },
    {
      "epoch": 0.41195476575121165,
      "grad_norm": 2.1930439197029634,
      "learning_rate": 7.3188124259873946e-06,
      "loss": 0.5401,
      "step": 765
    },
    {
      "epoch": 0.4146472805600431,
      "grad_norm": 2.37252598970325,
      "learning_rate": 7.2770689946013215e-06,
      "loss": 0.5047,
      "step": 770
    },
    {
      "epoch": 0.41733979536887456,
      "grad_norm": 2.284338792322131,
      "learning_rate": 7.235124348450143e-06,
      "loss": 0.5121,
      "step": 775
    },
    {
      "epoch": 0.420032310177706,
      "grad_norm": 2.253488965644508,
      "learning_rate": 7.192982194001312e-06,
      "loss": 0.5029,
      "step": 780
    },
    {
      "epoch": 0.4227248249865374,
      "grad_norm": 2.258949760855827,
      "learning_rate": 7.150646255175242e-06,
      "loss": 0.5282,
      "step": 785
    },
    {
      "epoch": 0.4254173397953689,
      "grad_norm": 2.2735662795523317,
      "learning_rate": 7.108120273016236e-06,
      "loss": 0.514,
      "step": 790
    },
    {
      "epoch": 0.4281098546042003,
      "grad_norm": 2.2850305304777865,
      "learning_rate": 7.065408005361902e-06,
      "loss": 0.5046,
      "step": 795
    },
    {
      "epoch": 0.4308023694130318,
      "grad_norm": 2.2431623733294925,
      "learning_rate": 7.022513226511098e-06,
      "loss": 0.5108,
      "step": 800
    },
    {
      "epoch": 0.4334948842218632,
      "grad_norm": 2.3702355406435958,
      "learning_rate": 6.9794397268904094e-06,
      "loss": 0.5039,
      "step": 805
    },
    {
      "epoch": 0.43618739903069464,
      "grad_norm": 2.424641619769436,
      "learning_rate": 6.9361913127192026e-06,
      "loss": 0.5135,
      "step": 810
    },
    {
      "epoch": 0.4388799138395261,
      "grad_norm": 2.417084290942502,
      "learning_rate": 6.892771805673289e-06,
      "loss": 0.5008,
      "step": 815
    },
    {
      "epoch": 0.44157242864835755,
      "grad_norm": 2.483155905595343,
      "learning_rate": 6.8491850425472216e-06,
      "loss": 0.5061,
      "step": 820
    },
    {
      "epoch": 0.44426494345718903,
      "grad_norm": 2.3647966120276624,
      "learning_rate": 6.805434874915249e-06,
      "loss": 0.5116,
      "step": 825
    },
    {
      "epoch": 0.44695745826602046,
      "grad_norm": 2.3941067808468235,
      "learning_rate": 6.7615251687909714e-06,
      "loss": 0.5115,
      "step": 830
    },
    {
      "epoch": 0.44964997307485194,
      "grad_norm": 2.431013960473848,
      "learning_rate": 6.7174598042857186e-06,
      "loss": 0.4884,
      "step": 835
    },
    {
      "epoch": 0.45234248788368336,
      "grad_norm": 2.3101487072378575,
      "learning_rate": 6.67324267526568e-06,
      "loss": 0.4952,
      "step": 840
    },
    {
      "epoch": 0.4550350026925148,
      "grad_norm": 2.252689751264616,
      "learning_rate": 6.62887768900782e-06,
      "loss": 0.4908,
      "step": 845
    },
    {
      "epoch": 0.45772751750134627,
      "grad_norm": 2.308986864251815,
      "learning_rate": 6.58436876585461e-06,
      "loss": 0.5115,
      "step": 850
    },
    {
      "epoch": 0.4604200323101777,
      "grad_norm": 2.3582282125809333,
      "learning_rate": 6.539719838867604e-06,
      "loss": 0.5037,
      "step": 855
    },
    {
      "epoch": 0.4631125471190092,
      "grad_norm": 2.7549506575377065,
      "learning_rate": 6.494934853479889e-06,
      "loss": 0.4939,
      "step": 860
    },
    {
      "epoch": 0.4658050619278406,
      "grad_norm": 3.0601934459715916,
      "learning_rate": 6.450017767147447e-06,
      "loss": 0.4988,
      "step": 865
    },
    {
      "epoch": 0.46849757673667203,
      "grad_norm": 2.3645028092509914,
      "learning_rate": 6.404972548999453e-06,
      "loss": 0.4962,
      "step": 870
    },
    {
      "epoch": 0.4711900915455035,
      "grad_norm": 2.345655002717708,
      "learning_rate": 6.3598031794875355e-06,
      "loss": 0.4993,
      "step": 875
    },
    {
      "epoch": 0.47388260635433493,
      "grad_norm": 2.4224055568308858,
      "learning_rate": 6.314513650034044e-06,
      "loss": 0.4798,
      "step": 880
    },
    {
      "epoch": 0.4765751211631664,
      "grad_norm": 2.3397132476622047,
      "learning_rate": 6.269107962679344e-06,
      "loss": 0.4879,
      "step": 885
    },
    {
      "epoch": 0.47926763597199784,
      "grad_norm": 2.4522897916842914,
      "learning_rate": 6.223590129728171e-06,
      "loss": 0.4765,
      "step": 890
    },
    {
      "epoch": 0.4819601507808293,
      "grad_norm": 2.3108192463540593,
      "learning_rate": 6.177964173395087e-06,
      "loss": 0.4864,
      "step": 895
    },
    {
      "epoch": 0.48465266558966075,
      "grad_norm": 2.267641247075609,
      "learning_rate": 6.1322341254490515e-06,
      "loss": 0.4836,
      "step": 900
    },
    {
      "epoch": 0.4873451803984922,
      "grad_norm": 2.2555810036335497,
      "learning_rate": 6.086404026857146e-06,
      "loss": 0.4842,
      "step": 905
    },
    {
      "epoch": 0.49003769520732365,
      "grad_norm": 2.327632792501213,
      "learning_rate": 6.040477927427502e-06,
      "loss": 0.4704,
      "step": 910
    },
    {
      "epoch": 0.4927302100161551,
      "grad_norm": 2.2698670809887185,
      "learning_rate": 5.994459885451423e-06,
      "loss": 0.4749,
      "step": 915
    },
    {
      "epoch": 0.49542272482498656,
      "grad_norm": 2.4725183020536847,
      "learning_rate": 5.948353967344786e-06,
      "loss": 0.4713,
      "step": 920
    },
    {
      "epoch": 0.498115239633818,
      "grad_norm": 2.3182514849471993,
      "learning_rate": 5.902164247288701e-06,
      "loss": 0.4591,
      "step": 925
    },
    {
      "epoch": 0.5008077544426495,
      "grad_norm": 2.2450126172776543,
      "learning_rate": 5.855894806869493e-06,
      "loss": 0.4706,
      "step": 930
    },
    {
      "epoch": 0.5035002692514808,
      "grad_norm": 2.4095070380073333,
      "learning_rate": 5.809549734718031e-06,
      "loss": 0.4706,
      "step": 935
    },
    {
      "epoch": 0.5061927840603123,
      "grad_norm": 2.2933657857094905,
      "learning_rate": 5.763133126148438e-06,
      "loss": 0.4665,
      "step": 940
    },
    {
      "epoch": 0.5088852988691438,
      "grad_norm": 2.3063316226503123,
      "learning_rate": 5.716649082796199e-06,
      "loss": 0.4625,
      "step": 945
    },
    {
      "epoch": 0.5115778136779753,
      "grad_norm": 2.3233885177577966,
      "learning_rate": 5.670101712255716e-06,
      "loss": 0.4688,
      "step": 950
    },
    {
      "epoch": 0.5142703284868066,
      "grad_norm": 2.211725156600061,
      "learning_rate": 5.6234951277173524e-06,
      "loss": 0.4543,
      "step": 955
    },
    {
      "epoch": 0.5169628432956381,
      "grad_norm": 2.4389466884168147,
      "learning_rate": 5.576833447603943e-06,
      "loss": 0.4703,
      "step": 960
    },
    {
      "epoch": 0.5196553581044696,
      "grad_norm": 2.3368172021818725,
      "learning_rate": 5.530120795206894e-06,
      "loss": 0.4655,
      "step": 965
    },
    {
      "epoch": 0.522347872913301,
      "grad_norm": 2.2744905601737804,
      "learning_rate": 5.483361298321803e-06,
      "loss": 0.4598,
      "step": 970
    },
    {
      "epoch": 0.5250403877221325,
      "grad_norm": 2.2417520684111354,
      "learning_rate": 5.4365590888837175e-06,
      "loss": 0.4542,
      "step": 975
    },
    {
      "epoch": 0.5277329025309639,
      "grad_norm": 2.260056934844272,
      "learning_rate": 5.389718302602008e-06,
      "loss": 0.4786,
      "step": 980
    },
    {
      "epoch": 0.5304254173397954,
      "grad_norm": 2.2937595883241,
      "learning_rate": 5.342843078594916e-06,
      "loss": 0.4649,
      "step": 985
    },
    {
      "epoch": 0.5331179321486268,
      "grad_norm": 2.298350623188229,
      "learning_rate": 5.295937559023794e-06,
      "loss": 0.4605,
      "step": 990
    },
    {
      "epoch": 0.5358104469574583,
      "grad_norm": 2.2861153437802915,
      "learning_rate": 5.2490058887270815e-06,
      "loss": 0.4529,
      "step": 995
    },
    {
      "epoch": 0.5385029617662898,
      "grad_norm": 2.2661516039174288,
      "learning_rate": 5.202052214854048e-06,
      "loss": 0.448,
      "step": 1000
    },
    {
      "epoch": 0.5411954765751211,
      "grad_norm": 2.287756336798117,
      "learning_rate": 5.1550806864983235e-06,
      "loss": 0.4577,
      "step": 1005
    },
    {
      "epoch": 0.5438879913839526,
      "grad_norm": 2.21952469191807,
      "learning_rate": 5.108095454331261e-06,
      "loss": 0.4401,
      "step": 1010
    },
    {
      "epoch": 0.5465805061927841,
      "grad_norm": 2.289575301200996,
      "learning_rate": 5.061100670235161e-06,
      "loss": 0.4504,
      "step": 1015
    },
    {
      "epoch": 0.5492730210016155,
      "grad_norm": 2.254586229300791,
      "learning_rate": 5.014100486936383e-06,
      "loss": 0.4454,
      "step": 1020
    },
    {
      "epoch": 0.5519655358104469,
      "grad_norm": 2.137160999612689,
      "learning_rate": 4.967099057638397e-06,
      "loss": 0.4366,
      "step": 1025
    },
    {
      "epoch": 0.5546580506192784,
      "grad_norm": 2.241158588636164,
      "learning_rate": 4.92010053565477e-06,
      "loss": 0.442,
      "step": 1030
    },
    {
      "epoch": 0.5573505654281099,
      "grad_norm": 2.218608478823859,
      "learning_rate": 4.873109074042169e-06,
      "loss": 0.4346,
      "step": 1035
    },
    {
      "epoch": 0.5600430802369413,
      "grad_norm": 2.43524119080364,
      "learning_rate": 4.826128825233362e-06,
      "loss": 0.4428,
      "step": 1040
    },
    {
      "epoch": 0.5627355950457728,
      "grad_norm": 2.353557854902857,
      "learning_rate": 4.77916394067029e-06,
      "loss": 0.4414,
      "step": 1045
    },
    {
      "epoch": 0.5654281098546042,
      "grad_norm": 2.2938687493350423,
      "learning_rate": 4.732218570437224e-06,
      "loss": 0.4568,
      "step": 1050
    },
    {
      "epoch": 0.5681206246634356,
      "grad_norm": 2.3216946351133405,
      "learning_rate": 4.6852968628940345e-06,
      "loss": 0.4397,
      "step": 1055
    },
    {
      "epoch": 0.5708131394722671,
      "grad_norm": 2.392290169077891,
      "learning_rate": 4.638402964309626e-06,
      "loss": 0.4323,
      "step": 1060
    },
    {
      "epoch": 0.5735056542810986,
      "grad_norm": 2.259967562074028,
      "learning_rate": 4.591541018495543e-06,
      "loss": 0.4427,
      "step": 1065
    },
    {
      "epoch": 0.57619816908993,
      "grad_norm": 2.4321895943132597,
      "learning_rate": 4.5447151664398e-06,
      "loss": 0.44,
      "step": 1070
    },
    {
      "epoch": 0.5788906838987614,
      "grad_norm": 2.350780966144005,
      "learning_rate": 4.497929545940968e-06,
      "loss": 0.44,
      "step": 1075
    },
    {
      "epoch": 0.5815831987075929,
      "grad_norm": 2.327312584804818,
      "learning_rate": 4.4511882912425214e-06,
      "loss": 0.4366,
      "step": 1080
    },
    {
      "epoch": 0.5842757135164244,
      "grad_norm": 2.294345174675599,
      "learning_rate": 4.404495532667525e-06,
      "loss": 0.4383,
      "step": 1085
    },
    {
      "epoch": 0.5869682283252557,
      "grad_norm": 2.2174354559341767,
      "learning_rate": 4.357855396253648e-06,
      "loss": 0.4412,
      "step": 1090
    },
    {
      "epoch": 0.5896607431340872,
      "grad_norm": 2.350816369766935,
      "learning_rate": 4.311272003388569e-06,
      "loss": 0.442,
      "step": 1095
    },
    {
      "epoch": 0.5923532579429187,
      "grad_norm": 2.3824872125165286,
      "learning_rate": 4.2647494704457775e-06,
      "loss": 0.4349,
      "step": 1100
    },
    {
      "epoch": 0.5950457727517501,
      "grad_norm": 2.2923371304598605,
      "learning_rate": 4.218291908420842e-06,
      "loss": 0.4286,
      "step": 1105
    },
    {
      "epoch": 0.5977382875605816,
      "grad_norm": 2.346360268676029,
      "learning_rate": 4.171903422568128e-06,
      "loss": 0.4336,
      "step": 1110
    },
    {
      "epoch": 0.600430802369413,
      "grad_norm": 2.218093217821692,
      "learning_rate": 4.1255881120380335e-06,
      "loss": 0.446,
      "step": 1115
    },
    {
      "epoch": 0.6031233171782445,
      "grad_norm": 2.276362681621089,
      "learning_rate": 4.079350069514779e-06,
      "loss": 0.4407,
      "step": 1120
    },
    {
      "epoch": 0.6058158319870759,
      "grad_norm": 2.3056261390539934,
      "learning_rate": 4.033193380854733e-06,
      "loss": 0.4289,
      "step": 1125
    },
    {
      "epoch": 0.6085083467959074,
      "grad_norm": 2.1989394458411686,
      "learning_rate": 3.9871221247253835e-06,
      "loss": 0.4175,
      "step": 1130
    },
    {
      "epoch": 0.6112008616047389,
      "grad_norm": 2.3562705886952915,
      "learning_rate": 3.9411403722449075e-06,
      "loss": 0.4259,
      "step": 1135
    },
    {
      "epoch": 0.6138933764135702,
      "grad_norm": 2.322065883191611,
      "learning_rate": 3.895252186622433e-06,
      "loss": 0.418,
      "step": 1140
    },
    {
      "epoch": 0.6165858912224017,
      "grad_norm": 2.315390737203203,
      "learning_rate": 3.849461622798993e-06,
      "loss": 0.4174,
      "step": 1145
    },
    {
      "epoch": 0.6192784060312332,
      "grad_norm": 2.136385235781553,
      "learning_rate": 3.8037727270891927e-06,
      "loss": 0.4182,
      "step": 1150
    },
    {
      "epoch": 0.6219709208400647,
      "grad_norm": 2.2676350954914257,
      "learning_rate": 3.758189536823673e-06,
      "loss": 0.4162,
      "step": 1155
    },
    {
      "epoch": 0.624663435648896,
      "grad_norm": 2.3081143159586164,
      "learning_rate": 3.71271607999234e-06,
      "loss": 0.415,
      "step": 1160
    },
    {
      "epoch": 0.6273559504577275,
      "grad_norm": 2.1196383763132784,
      "learning_rate": 3.6673563748884268e-06,
      "loss": 0.4225,
      "step": 1165
    },
    {
      "epoch": 0.630048465266559,
      "grad_norm": 2.3577148406371284,
      "learning_rate": 3.6221144297534178e-06,
      "loss": 0.41,
      "step": 1170
    },
    {
      "epoch": 0.6327409800753904,
      "grad_norm": 2.332635685786891,
      "learning_rate": 3.5769942424228578e-06,
      "loss": 0.4137,
      "step": 1175
    },
    {
      "epoch": 0.6354334948842219,
      "grad_norm": 2.254772419320123,
      "learning_rate": 3.531999799973079e-06,
      "loss": 0.429,
      "step": 1180
    },
    {
      "epoch": 0.6381260096930533,
      "grad_norm": 2.1991733799648907,
      "learning_rate": 3.4871350783688795e-06,
      "loss": 0.3971,
      "step": 1185
    },
    {
      "epoch": 0.6408185245018848,
      "grad_norm": 2.2671939485505916,
      "learning_rate": 3.4424040421121903e-06,
      "loss": 0.4163,
      "step": 1190
    },
    {
      "epoch": 0.6435110393107162,
      "grad_norm": 2.258087169479648,
      "learning_rate": 3.397810643891741e-06,
      "loss": 0.4059,
      "step": 1195
    },
    {
      "epoch": 0.6462035541195477,
      "grad_norm": 2.3543023457270325,
      "learning_rate": 3.35335882423378e-06,
      "loss": 0.4224,
      "step": 1200
    },
    {
      "epoch": 0.6488960689283791,
      "grad_norm": 2.146955492636297,
      "learning_rate": 3.309052511153873e-06,
      "loss": 0.4087,
      "step": 1205
    },
    {
      "epoch": 0.6515885837372105,
      "grad_norm": 2.238937583269941,
      "learning_rate": 3.2648956198097946e-06,
      "loss": 0.4053,
      "step": 1210
    },
    {
      "epoch": 0.654281098546042,
      "grad_norm": 2.190819239320771,
      "learning_rate": 3.2208920521555677e-06,
      "loss": 0.3945,
      "step": 1215
    },
    {
      "epoch": 0.6569736133548735,
      "grad_norm": 2.1827434556639442,
      "learning_rate": 3.177045696596658e-06,
      "loss": 0.4089,
      "step": 1220
    },
    {
      "epoch": 0.6596661281637048,
      "grad_norm": 2.128197496062037,
      "learning_rate": 3.133360427646378e-06,
      "loss": 0.3943,
      "step": 1225
    },
    {
      "epoch": 0.6623586429725363,
      "grad_norm": 2.258181737280764,
      "learning_rate": 3.089840105583516e-06,
      "loss": 0.4014,
      "step": 1230
    },
    {
      "epoch": 0.6650511577813678,
      "grad_norm": 2.2263287080503202,
      "learning_rate": 3.0464885761112083e-06,
      "loss": 0.4085,
      "step": 1235
    },
    {
      "epoch": 0.6677436725901993,
      "grad_norm": 2.1158495070811467,
      "learning_rate": 3.0033096700171218e-06,
      "loss": 0.3919,
      "step": 1240
    },
    {
      "epoch": 0.6704361873990307,
      "grad_norm": 2.1418686648492415,
      "learning_rate": 2.960307202834941e-06,
      "loss": 0.408,
      "step": 1245
    },
    {
      "epoch": 0.6731287022078621,
      "grad_norm": 2.13102582996017,
      "learning_rate": 2.9174849745072003e-06,
      "loss": 0.3941,
      "step": 1250
    },
    {
      "epoch": 0.6758212170166936,
      "grad_norm": 2.1255705801818277,
      "learning_rate": 2.874846769049509e-06,
      "loss": 0.4043,
      "step": 1255
    },
    {
      "epoch": 0.678513731825525,
      "grad_norm": 2.314046111156198,
      "learning_rate": 2.8323963542161665e-06,
      "loss": 0.4049,
      "step": 1260
    },
    {
      "epoch": 0.6812062466343565,
      "grad_norm": 2.247094999606519,
      "learning_rate": 2.7901374811672256e-06,
      "loss": 0.3924,
      "step": 1265
    },
    {
      "epoch": 0.683898761443188,
      "grad_norm": 2.1331620184210958,
      "learning_rate": 2.748073884137016e-06,
      "loss": 0.3942,
      "step": 1270
    },
    {
      "epoch": 0.6865912762520194,
      "grad_norm": 2.2729136222517257,
      "learning_rate": 2.7062092801041717e-06,
      "loss": 0.39,
      "step": 1275
    },
    {
      "epoch": 0.6892837910608508,
      "grad_norm": 2.1267931341347968,
      "learning_rate": 2.664547368463171e-06,
      "loss": 0.3833,
      "step": 1280
    },
    {
      "epoch": 0.6919763058696823,
      "grad_norm": 2.1728485155426838,
      "learning_rate": 2.6230918306974378e-06,
      "loss": 0.3929,
      "step": 1285
    },
    {
      "epoch": 0.6946688206785138,
      "grad_norm": 2.247179993428309,
      "learning_rate": 2.581846330054034e-06,
      "loss": 0.3915,
      "step": 1290
    },
    {
      "epoch": 0.6973613354873451,
      "grad_norm": 2.146951309924855,
      "learning_rate": 2.5408145112199424e-06,
      "loss": 0.3894,
      "step": 1295
    },
    {
      "epoch": 0.7000538502961766,
      "grad_norm": 2.1704461453183628,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.3942,
      "step": 1300
    },
    {
      "epoch": 0.7027463651050081,
      "grad_norm": 2.164674877249693,
      "learning_rate": 2.4594064029965197e-06,
      "loss": 0.3883,
      "step": 1305
    },
    {
      "epoch": 0.7054388799138396,
      "grad_norm": 2.2000767508999175,
      "learning_rate": 2.4190373072905687e-06,
      "loss": 0.3884,
      "step": 1310
    },
    {
      "epoch": 0.708131394722671,
      "grad_norm": 2.3684557762781546,
      "learning_rate": 2.378896280125006e-06,
      "loss": 0.3864,
      "step": 1315
    },
    {
      "epoch": 0.7108239095315024,
      "grad_norm": 1.98524822133492,
      "learning_rate": 2.3389868685892573e-06,
      "loss": 0.3704,
      "step": 1320
    },
    {
      "epoch": 0.7135164243403339,
      "grad_norm": 2.19288217749482,
      "learning_rate": 2.2993125993058727e-06,
      "loss": 0.3747,
      "step": 1325
    },
    {
      "epoch": 0.7162089391491653,
      "grad_norm": 2.1617200756921497,
      "learning_rate": 2.2598769781189004e-06,
      "loss": 0.3865,
      "step": 1330
    },
    {
      "epoch": 0.7189014539579968,
      "grad_norm": 2.132621198575595,
      "learning_rate": 2.2206834897840814e-06,
      "loss": 0.3871,
      "step": 1335
    },
    {
      "epoch": 0.7215939687668282,
      "grad_norm": 2.145671425646657,
      "learning_rate": 2.1817355976609212e-06,
      "loss": 0.3865,
      "step": 1340
    },
    {
      "epoch": 0.7242864835756596,
      "grad_norm": 2.0530895422503868,
      "learning_rate": 2.14303674340665e-06,
      "loss": 0.3673,
      "step": 1345
    },
    {
      "epoch": 0.7269789983844911,
      "grad_norm": 2.2838748871806054,
      "learning_rate": 2.1045903466720915e-06,
      "loss": 0.38,
      "step": 1350
    },
    {
      "epoch": 0.7296715131933226,
      "grad_norm": 2.2502198899358348,
      "learning_rate": 2.0663998047994906e-06,
      "loss": 0.3874,
      "step": 1355
    },
    {
      "epoch": 0.732364028002154,
      "grad_norm": 2.128622996026954,
      "learning_rate": 2.0284684925223006e-06,
      "loss": 0.3775,
      "step": 1360
    },
    {
      "epoch": 0.7350565428109854,
      "grad_norm": 2.219696821235303,
      "learning_rate": 1.990799761666975e-06,
      "loss": 0.3798,
      "step": 1365
    },
    {
      "epoch": 0.7377490576198169,
      "grad_norm": 2.197304231151606,
      "learning_rate": 1.953396940856776e-06,
      "loss": 0.3724,
      "step": 1370
    },
    {
      "epoch": 0.7404415724286484,
      "grad_norm": 2.2465171659133674,
      "learning_rate": 1.9162633352176484e-06,
      "loss": 0.3831,
      "step": 1375
    },
    {
      "epoch": 0.7431340872374798,
      "grad_norm": 2.1996595546449367,
      "learning_rate": 1.8794022260861483e-06,
      "loss": 0.383,
      "step": 1380
    },
    {
      "epoch": 0.7458266020463112,
      "grad_norm": 2.1017548100685417,
      "learning_rate": 1.8428168707194922e-06,
      "loss": 0.3765,
      "step": 1385
    },
    {
      "epoch": 0.7485191168551427,
      "grad_norm": 2.064327514284528,
      "learning_rate": 1.8065105020077262e-06,
      "loss": 0.3694,
      "step": 1390
    },
    {
      "epoch": 0.7512116316639742,
      "grad_norm": 2.0962178314477016,
      "learning_rate": 1.7704863281880496e-06,
      "loss": 0.3866,
      "step": 1395
    },
    {
      "epoch": 0.7539041464728056,
      "grad_norm": 2.124206708166897,
      "learning_rate": 1.7347475325613134e-06,
      "loss": 0.3741,
      "step": 1400
    },
    {
      "epoch": 0.756596661281637,
      "grad_norm": 2.379442181939827,
      "learning_rate": 1.6992972732107322e-06,
      "loss": 0.3762,
      "step": 1405
    },
    {
      "epoch": 0.7592891760904685,
      "grad_norm": 2.2436516667976933,
      "learning_rate": 1.6641386827228107e-06,
      "loss": 0.3668,
      "step": 1410
    },
    {
      "epoch": 0.7619816908992999,
      "grad_norm": 2.106915257521231,
      "learning_rate": 1.6292748679105407e-06,
      "loss": 0.3715,
      "step": 1415
    },
    {
      "epoch": 0.7646742057081314,
      "grad_norm": 2.0865052999042546,
      "learning_rate": 1.594708909538854e-06,
      "loss": 0.3798,
      "step": 1420
    },
    {
      "epoch": 0.7673667205169629,
      "grad_norm": 2.2132376460253753,
      "learning_rate": 1.5604438620523932e-06,
      "loss": 0.3641,
      "step": 1425
    },
    {
      "epoch": 0.7700592353257943,
      "grad_norm": 2.101010632458687,
      "learning_rate": 1.5264827533056058e-06,
      "loss": 0.3736,
      "step": 1430
    },
    {
      "epoch": 0.7727517501346257,
      "grad_norm": 2.0835331975727573,
      "learning_rate": 1.492828584295184e-06,
      "loss": 0.3569,
      "step": 1435
    },
    {
      "epoch": 0.7754442649434572,
      "grad_norm": 2.265759304633531,
      "learning_rate": 1.4594843288948773e-06,
      "loss": 0.3662,
      "step": 1440
    },
    {
      "epoch": 0.7781367797522887,
      "grad_norm": 2.2173463946322323,
      "learning_rate": 1.4264529335927114e-06,
      "loss": 0.37,
      "step": 1445
    },
    {
      "epoch": 0.78082929456112,
      "grad_norm": 2.1518226151098125,
      "learning_rate": 1.3937373172306056e-06,
      "loss": 0.3638,
      "step": 1450
    },
    {
      "epoch": 0.7835218093699515,
      "grad_norm": 2.055682580111425,
      "learning_rate": 1.361340370746464e-06,
      "loss": 0.3702,
      "step": 1455
    },
    {
      "epoch": 0.786214324178783,
      "grad_norm": 2.0945805072566586,
      "learning_rate": 1.3292649569187072e-06,
      "loss": 0.3641,
      "step": 1460
    },
    {
      "epoch": 0.7889068389876144,
      "grad_norm": 2.099143771457234,
      "learning_rate": 1.2975139101132999e-06,
      "loss": 0.3647,
      "step": 1465
    },
    {
      "epoch": 0.7915993537964459,
      "grad_norm": 2.2165194526856378,
      "learning_rate": 1.2660900360332927e-06,
      "loss": 0.3715,
      "step": 1470
    },
    {
      "epoch": 0.7942918686052773,
      "grad_norm": 2.244374868499319,
      "learning_rate": 1.2349961114708936e-06,
      "loss": 0.3623,
      "step": 1475
    },
    {
      "epoch": 0.7969843834141088,
      "grad_norm": 2.186870000073866,
      "learning_rate": 1.2042348840620955e-06,
      "loss": 0.3729,
      "step": 1480
    },
    {
      "epoch": 0.7996768982229402,
      "grad_norm": 2.5050253183494244,
      "learning_rate": 1.1738090720438782e-06,
      "loss": 0.3576,
      "step": 1485
    },
    {
      "epoch": 0.8023694130317717,
      "grad_norm": 2.0739438241919594,
      "learning_rate": 1.1437213640140105e-06,
      "loss": 0.3617,
      "step": 1490
    },
    {
      "epoch": 0.8050619278406032,
      "grad_norm": 2.0754012982092265,
      "learning_rate": 1.113974418693471e-06,
      "loss": 0.3595,
      "step": 1495
    },
    {
      "epoch": 0.8077544426494345,
      "grad_norm": 2.3081364092115133,
      "learning_rate": 1.0845708646915054e-06,
      "loss": 0.361,
      "step": 1500
    },
    {
      "epoch": 0.810446957458266,
      "grad_norm": 2.0032280348669067,
      "learning_rate": 1.055513300273352e-06,
      "loss": 0.3578,
      "step": 1505
    },
    {
      "epoch": 0.8131394722670975,
      "grad_norm": 2.0937736954570365,
      "learning_rate": 1.026804293130641e-06,
      "loss": 0.3616,
      "step": 1510
    },
    {
      "epoch": 0.815831987075929,
      "grad_norm": 2.1070535051869443,
      "learning_rate": 9.984463801544992e-07,
      "loss": 0.3535,
      "step": 1515
    },
    {
      "epoch": 0.8185245018847603,
      "grad_norm": 2.3516801013906097,
      "learning_rate": 9.704420672113774e-07,
      "loss": 0.3536,
      "step": 1520
    },
    {
      "epoch": 0.8212170166935918,
      "grad_norm": 1.9740143572115736,
      "learning_rate": 9.42793828921616e-07,
      "loss": 0.3608,
      "step": 1525
    },
    {
      "epoch": 0.8239095315024233,
      "grad_norm": 2.0979215240658027,
      "learning_rate": 9.15504108440774e-07,
      "loss": 0.3736,
      "step": 1530
    },
    {
      "epoch": 0.8266020463112547,
      "grad_norm": 2.0284760942560323,
      "learning_rate": 8.885753172437389e-07,
      "loss": 0.3508,
      "step": 1535
    },
    {
      "epoch": 0.8292945611200861,
      "grad_norm": 2.0211247506598395,
      "learning_rate": 8.620098349116329e-07,
      "loss": 0.352,
      "step": 1540
    },
    {
      "epoch": 0.8319870759289176,
      "grad_norm": 2.3428781214119985,
      "learning_rate": 8.358100089215426e-07,
      "loss": 0.3603,
      "step": 1545
    },
    {
      "epoch": 0.8346795907377491,
      "grad_norm": 2.109207498019035,
      "learning_rate": 8.099781544390795e-07,
      "loss": 0.3629,
      "step": 1550
    },
    {
      "epoch": 0.8373721055465805,
      "grad_norm": 2.100615618738898,
      "learning_rate": 7.845165541138011e-07,
      "loss": 0.3556,
      "step": 1555
    },
    {
      "epoch": 0.840064620355412,
      "grad_norm": 2.0303755145271243,
      "learning_rate": 7.594274578775007e-07,
      "loss": 0.3555,
      "step": 1560
    },
    {
      "epoch": 0.8427571351642434,
      "grad_norm": 2.0652917821688104,
      "learning_rate": 7.347130827453941e-07,
      "loss": 0.3588,
      "step": 1565
    },
    {
      "epoch": 0.8454496499730748,
      "grad_norm": 1.9882512618837487,
      "learning_rate": 7.103756126202071e-07,
      "loss": 0.3613,
      "step": 1570
    },
    {
      "epoch": 0.8481421647819063,
      "grad_norm": 2.067179222751156,
      "learning_rate": 6.864171980991985e-07,
      "loss": 0.3453,
      "step": 1575
    },
    {
      "epoch": 0.8508346795907378,
      "grad_norm": 2.093050443455704,
      "learning_rate": 6.628399562841159e-07,
      "loss": 0.3548,
      "step": 1580
    },
    {
      "epoch": 0.8535271943995691,
      "grad_norm": 2.1033534008065073,
      "learning_rate": 6.396459705941204e-07,
      "loss": 0.3471,
      "step": 1585
    },
    {
      "epoch": 0.8562197092084006,
      "grad_norm": 1.9167623533757014,
      "learning_rate": 6.168372905816822e-07,
      "loss": 0.3477,
      "step": 1590
    },
    {
      "epoch": 0.8589122240172321,
      "grad_norm": 2.1430265742267105,
      "learning_rate": 5.944159317514709e-07,
      "loss": 0.3517,
      "step": 1595
    },
    {
      "epoch": 0.8616047388260636,
      "grad_norm": 2.1988162518545686,
      "learning_rate": 5.723838753822525e-07,
      "loss": 0.3689,
      "step": 1600
    },
    {
      "epoch": 0.864297253634895,
      "grad_norm": 2.2356747531202807,
      "learning_rate": 5.507430683518161e-07,
      "loss": 0.3449,
      "step": 1605
    },
    {
      "epoch": 0.8669897684437264,
      "grad_norm": 2.1308682594997324,
      "learning_rate": 5.294954229649329e-07,
      "loss": 0.3507,
      "step": 1610
    },
    {
      "epoch": 0.8696822832525579,
      "grad_norm": 2.0875409392190107,
      "learning_rate": 5.086428167843755e-07,
      "loss": 0.3476,
      "step": 1615
    },
    {
      "epoch": 0.8723747980613893,
      "grad_norm": 2.147966222762087,
      "learning_rate": 4.881870924650062e-07,
      "loss": 0.342,
      "step": 1620
    },
    {
      "epoch": 0.8750673128702208,
      "grad_norm": 2.0556458342996464,
      "learning_rate": 4.681300575909492e-07,
      "loss": 0.3471,
      "step": 1625
    },
    {
      "epoch": 0.8777598276790523,
      "grad_norm": 2.015550654050713,
      "learning_rate": 4.4847348451586226e-07,
      "loss": 0.3478,
      "step": 1630
    },
    {
      "epoch": 0.8804523424878837,
      "grad_norm": 1.998136493936924,
      "learning_rate": 4.2921911020631926e-07,
      "loss": 0.3503,
      "step": 1635
    },
    {
      "epoch": 0.8831448572967151,
      "grad_norm": 1.8537901854708771,
      "learning_rate": 4.1036863608832856e-07,
      "loss": 0.3468,
      "step": 1640
    },
    {
      "epoch": 0.8858373721055466,
      "grad_norm": 2.080626726929853,
      "learning_rate": 3.919237278969773e-07,
      "loss": 0.3445,
      "step": 1645
    },
    {
      "epoch": 0.8885298869143781,
      "grad_norm": 1.8500187551625011,
      "learning_rate": 3.7388601552924066e-07,
      "loss": 0.3515,
      "step": 1650
    },
    {
      "epoch": 0.8912224017232094,
      "grad_norm": 1.9930943811147626,
      "learning_rate": 3.56257092899957e-07,
      "loss": 0.3389,
      "step": 1655
    },
    {
      "epoch": 0.8939149165320409,
      "grad_norm": 2.053675256803709,
      "learning_rate": 3.390385178009781e-07,
      "loss": 0.3534,
      "step": 1660
    },
    {
      "epoch": 0.8966074313408724,
      "grad_norm": 2.0176018019009225,
      "learning_rate": 3.222318117635143e-07,
      "loss": 0.3391,
      "step": 1665
    },
    {
      "epoch": 0.8992999461497039,
      "grad_norm": 2.165084875616374,
      "learning_rate": 3.058384599236847e-07,
      "loss": 0.3392,
      "step": 1670
    },
    {
      "epoch": 0.9019924609585352,
      "grad_norm": 1.8300243137706005,
      "learning_rate": 2.8985991089127916e-07,
      "loss": 0.3519,
      "step": 1675
    },
    {
      "epoch": 0.9046849757673667,
      "grad_norm": 1.9039488415439678,
      "learning_rate": 2.7429757662175316e-07,
      "loss": 0.3362,
      "step": 1680
    },
    {
      "epoch": 0.9073774905761982,
      "grad_norm": 2.1498883952955805,
      "learning_rate": 2.5915283229146106e-07,
      "loss": 0.3485,
      "step": 1685
    },
    {
      "epoch": 0.9100700053850296,
      "grad_norm": 1.9985654494313574,
      "learning_rate": 2.44427016176132e-07,
      "loss": 0.349,
      "step": 1690
    },
    {
      "epoch": 0.9127625201938611,
      "grad_norm": 2.2813576484209896,
      "learning_rate": 2.301214295326193e-07,
      "loss": 0.3462,
      "step": 1695
    },
    {
      "epoch": 0.9154550350026925,
      "grad_norm": 2.0155307395335385,
      "learning_rate": 2.162373364839071e-07,
      "loss": 0.334,
      "step": 1700
    },
    {
      "epoch": 0.9181475498115239,
      "grad_norm": 1.865462382587576,
      "learning_rate": 2.0277596390741005e-07,
      "loss": 0.3465,
      "step": 1705
    },
    {
      "epoch": 0.9208400646203554,
      "grad_norm": 1.9778403222139225,
      "learning_rate": 1.8973850132655957e-07,
      "loss": 0.3399,
      "step": 1710
    },
    {
      "epoch": 0.9235325794291869,
      "grad_norm": 2.1982054104516986,
      "learning_rate": 1.7712610080568738e-07,
      "loss": 0.3471,
      "step": 1715
    },
    {
      "epoch": 0.9262250942380184,
      "grad_norm": 2.0362535026984396,
      "learning_rate": 1.649398768482241e-07,
      "loss": 0.35,
      "step": 1720
    },
    {
      "epoch": 0.9289176090468497,
      "grad_norm": 2.0586480283901714,
      "learning_rate": 1.5318090629821757e-07,
      "loss": 0.3414,
      "step": 1725
    },
    {
      "epoch": 0.9316101238556812,
      "grad_norm": 2.0363932956107,
      "learning_rate": 1.4185022824517392e-07,
      "loss": 0.3432,
      "step": 1730
    },
    {
      "epoch": 0.9343026386645127,
      "grad_norm": 2.0787927070430943,
      "learning_rate": 1.3094884393223872e-07,
      "loss": 0.3491,
      "step": 1735
    },
    {
      "epoch": 0.9369951534733441,
      "grad_norm": 1.9680040636502156,
      "learning_rate": 1.2047771666772124e-07,
      "loss": 0.3452,
      "step": 1740
    },
    {
      "epoch": 0.9396876682821755,
      "grad_norm": 1.8963738671804522,
      "learning_rate": 1.1043777173997239e-07,
      "loss": 0.3437,
      "step": 1745
    },
    {
      "epoch": 0.942380183091007,
      "grad_norm": 2.074849031085683,
      "learning_rate": 1.0082989633561746e-07,
      "loss": 0.3399,
      "step": 1750
    },
    {
      "epoch": 0.9450726978998385,
      "grad_norm": 2.005988363818133,
      "learning_rate": 9.165493946116432e-08,
      "loss": 0.341,
      "step": 1755
    },
    {
      "epoch": 0.9477652127086699,
      "grad_norm": 1.9739191876783688,
      "learning_rate": 8.29137118679757e-08,
      "loss": 0.3505,
      "step": 1760
    },
    {
      "epoch": 0.9504577275175013,
      "grad_norm": 2.0544856800909863,
      "learning_rate": 7.460698598063032e-08,
      "loss": 0.3504,
      "step": 1765
    },
    {
      "epoch": 0.9531502423263328,
      "grad_norm": 2.0266695028121604,
      "learning_rate": 6.673549582866368e-08,
      "loss": 0.3437,
      "step": 1770
    },
    {
      "epoch": 0.9558427571351642,
      "grad_norm": 1.9158086758881532,
      "learning_rate": 5.929993698170877e-08,
      "loss": 0.3407,
      "step": 1775
    },
    {
      "epoch": 0.9585352719439957,
      "grad_norm": 2.060993161808353,
      "learning_rate": 5.2300966488026406e-08,
      "loss": 0.3514,
      "step": 1780
    },
    {
      "epoch": 0.9612277867528272,
      "grad_norm": 2.2300563421538735,
      "learning_rate": 4.573920281645161e-08,
      "loss": 0.3329,
      "step": 1785
    },
    {
      "epoch": 0.9639203015616586,
      "grad_norm": 1.9501346199167315,
      "learning_rate": 3.96152258017346e-08,
      "loss": 0.3373,
      "step": 1790
    },
    {
      "epoch": 0.96661281637049,
      "grad_norm": 2.078047814961423,
      "learning_rate": 3.3929576593310644e-08,
      "loss": 0.3498,
      "step": 1795
    },
    {
      "epoch": 0.9693053311793215,
      "grad_norm": 1.9588663874547265,
      "learning_rate": 2.868275760747441e-08,
      "loss": 0.3351,
      "step": 1800
    },
    {
      "epoch": 0.971997845988153,
      "grad_norm": 2.0555012024055563,
      "learning_rate": 2.387523248298773e-08,
      "loss": 0.3438,
      "step": 1805
    },
    {
      "epoch": 0.9746903607969843,
      "grad_norm": 2.02985190947748,
      "learning_rate": 1.9507426040110134e-08,
      "loss": 0.3371,
      "step": 1810
    },
    {
      "epoch": 0.9773828756058158,
      "grad_norm": 1.9411470270835758,
      "learning_rate": 1.557972424305665e-08,
      "loss": 0.3423,
      "step": 1815
    },
    {
      "epoch": 0.9800753904146473,
      "grad_norm": 2.1196737223933773,
      "learning_rate": 1.209247416589232e-08,
      "loss": 0.344,
      "step": 1820
    },
    {
      "epoch": 0.9827679052234787,
      "grad_norm": 2.051817684549473,
      "learning_rate": 9.045983961865622e-09,
      "loss": 0.3568,
      "step": 1825
    },
    {
      "epoch": 0.9854604200323102,
      "grad_norm": 2.0189362221244957,
      "learning_rate": 6.440522836174135e-09,
      "loss": 0.3428,
      "step": 1830
    },
    {
      "epoch": 0.9881529348411416,
      "grad_norm": 1.9669363709037755,
      "learning_rate": 4.27632102218023e-09,
      "loss": 0.3344,
      "step": 1835
    },
    {
      "epoch": 0.9908454496499731,
      "grad_norm": 1.905202601388718,
      "learning_rate": 2.5535697610618026e-09,
      "loss": 0.3398,
      "step": 1840
    },
    {
      "epoch": 0.9935379644588045,
      "grad_norm": 1.9833936251009352,
      "learning_rate": 1.2724212849180019e-09,
      "loss": 0.3458,
      "step": 1845
    },
    {
      "epoch": 0.996230479267636,
      "grad_norm": 1.8216154780128988,
      "learning_rate": 4.329888033127727e-10,
      "loss": 0.3413,
      "step": 1850
    },
    {
      "epoch": 0.9989229940764675,
      "grad_norm": 2.0143818367459474,
      "learning_rate": 3.5346493273968886e-11,
      "loss": 0.3454,
      "step": 1855
    },
    {
      "epoch": 1.0,
      "eval_runtime": 3.4477,
      "eval_samples_per_second": 2.9,
      "eval_steps_per_second": 0.87,
      "step": 1857
    },
    {
      "epoch": 1.0,
      "step": 1857,
      "total_flos": 194409010298880.0,
      "train_loss": 0.5086988852623655,
      "train_runtime": 16625.9643,
      "train_samples_per_second": 1.787,
      "train_steps_per_second": 0.112
    }
  ],
  "logging_steps": 5,
  "max_steps": 1857,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 194409010298880.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}