{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1207,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0008285004142502071,
      "grad_norm": 23.866117550094145,
      "learning_rate": 8.264462809917357e-08,
      "loss": 1.4163,
      "step": 1
    },
    {
      "epoch": 0.004142502071251036,
      "grad_norm": 22.991207135218495,
      "learning_rate": 4.132231404958678e-07,
      "loss": 1.4008,
      "step": 5
    },
    {
      "epoch": 0.008285004142502071,
      "grad_norm": 8.161072224423009,
      "learning_rate": 8.264462809917356e-07,
      "loss": 1.3121,
      "step": 10
    },
    {
      "epoch": 0.012427506213753107,
      "grad_norm": 10.651287293408194,
      "learning_rate": 1.2396694214876035e-06,
      "loss": 1.1732,
      "step": 15
    },
    {
      "epoch": 0.016570008285004142,
      "grad_norm": 2.9695263532042517,
      "learning_rate": 1.6528925619834712e-06,
      "loss": 1.0137,
      "step": 20
    },
    {
      "epoch": 0.020712510356255178,
      "grad_norm": 3.0649608491168774,
      "learning_rate": 2.066115702479339e-06,
      "loss": 0.9608,
      "step": 25
    },
    {
      "epoch": 0.024855012427506214,
      "grad_norm": 2.4113702925470575,
      "learning_rate": 2.479338842975207e-06,
      "loss": 0.9263,
      "step": 30
    },
    {
      "epoch": 0.02899751449875725,
      "grad_norm": 2.2764880745044525,
      "learning_rate": 2.8925619834710743e-06,
      "loss": 0.9069,
      "step": 35
    },
    {
      "epoch": 0.033140016570008285,
      "grad_norm": 2.307963331742189,
      "learning_rate": 3.3057851239669424e-06,
      "loss": 0.8949,
      "step": 40
    },
    {
      "epoch": 0.037282518641259324,
      "grad_norm": 2.290742694256339,
      "learning_rate": 3.71900826446281e-06,
      "loss": 0.8849,
      "step": 45
    },
    {
      "epoch": 0.041425020712510356,
      "grad_norm": 2.418716943098049,
      "learning_rate": 4.132231404958678e-06,
      "loss": 0.8662,
      "step": 50
    },
    {
      "epoch": 0.045567522783761395,
      "grad_norm": 2.256258952989886,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.8617,
      "step": 55
    },
    {
      "epoch": 0.04971002485501243,
      "grad_norm": 2.3643676843983013,
      "learning_rate": 4.958677685950414e-06,
      "loss": 0.8434,
      "step": 60
    },
    {
      "epoch": 0.053852526926263466,
      "grad_norm": 2.51362497881208,
      "learning_rate": 5.371900826446281e-06,
      "loss": 0.8586,
      "step": 65
    },
    {
      "epoch": 0.0579950289975145,
      "grad_norm": 2.409197722940547,
      "learning_rate": 5.785123966942149e-06,
      "loss": 0.8391,
      "step": 70
    },
    {
      "epoch": 0.06213753106876554,
      "grad_norm": 2.504763734639035,
      "learning_rate": 6.198347107438017e-06,
      "loss": 0.8323,
      "step": 75
    },
    {
      "epoch": 0.06628003314001657,
      "grad_norm": 2.342235286421664,
      "learning_rate": 6.611570247933885e-06,
      "loss": 0.8366,
      "step": 80
    },
    {
      "epoch": 0.07042253521126761,
      "grad_norm": 2.601805905814255,
      "learning_rate": 7.0247933884297525e-06,
      "loss": 0.83,
      "step": 85
    },
    {
      "epoch": 0.07456503728251865,
      "grad_norm": 2.4729528650877226,
      "learning_rate": 7.43801652892562e-06,
      "loss": 0.8193,
      "step": 90
    },
    {
      "epoch": 0.07870753935376967,
      "grad_norm": 2.347978873060577,
      "learning_rate": 7.851239669421489e-06,
      "loss": 0.8108,
      "step": 95
    },
    {
      "epoch": 0.08285004142502071,
      "grad_norm": 2.3865550609084005,
      "learning_rate": 8.264462809917356e-06,
      "loss": 0.8068,
      "step": 100
    },
    {
      "epoch": 0.08699254349627175,
      "grad_norm": 2.397866066956603,
      "learning_rate": 8.677685950413224e-06,
      "loss": 0.8146,
      "step": 105
    },
    {
      "epoch": 0.09113504556752279,
      "grad_norm": 2.528140675905014,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.8014,
      "step": 110
    },
    {
      "epoch": 0.09527754763877382,
      "grad_norm": 2.5772809506040004,
      "learning_rate": 9.50413223140496e-06,
      "loss": 0.7929,
      "step": 115
    },
    {
      "epoch": 0.09942004971002485,
      "grad_norm": 2.4698447608273226,
      "learning_rate": 9.917355371900828e-06,
      "loss": 0.7957,
      "step": 120
    },
    {
      "epoch": 0.1035625517812759,
      "grad_norm": 2.4271647358481077,
      "learning_rate": 9.999665269535307e-06,
      "loss": 0.7947,
      "step": 125
    },
    {
      "epoch": 0.10770505385252693,
      "grad_norm": 2.399585162136438,
      "learning_rate": 9.998305503833872e-06,
      "loss": 0.7952,
      "step": 130
    },
    {
      "epoch": 0.11184755592377796,
      "grad_norm": 2.356625988593114,
      "learning_rate": 9.995900066492902e-06,
      "loss": 0.7903,
      "step": 135
    },
    {
      "epoch": 0.115990057995029,
      "grad_norm": 2.3562051609533894,
      "learning_rate": 9.992449460742464e-06,
      "loss": 0.7855,
      "step": 140
    },
    {
      "epoch": 0.12013256006628004,
      "grad_norm": 2.2315403444753925,
      "learning_rate": 9.98795440846732e-06,
      "loss": 0.7721,
      "step": 145
    },
    {
      "epoch": 0.12427506213753108,
      "grad_norm": 2.3299296938897203,
      "learning_rate": 9.982415850055902e-06,
      "loss": 0.7733,
      "step": 150
    },
    {
      "epoch": 0.12841756420878211,
      "grad_norm": 2.176864153510824,
      "learning_rate": 9.975834944203581e-06,
      "loss": 0.7641,
      "step": 155
    },
    {
      "epoch": 0.13256006628003314,
      "grad_norm": 2.189529036305693,
      "learning_rate": 9.968213067670265e-06,
      "loss": 0.7688,
      "step": 160
    },
    {
      "epoch": 0.13670256835128416,
      "grad_norm": 2.232386403924057,
      "learning_rate": 9.959551814992364e-06,
      "loss": 0.7458,
      "step": 165
    },
    {
      "epoch": 0.14084507042253522,
      "grad_norm": 2.596315903481849,
      "learning_rate": 9.949852998149217e-06,
      "loss": 0.7621,
      "step": 170
    },
    {
      "epoch": 0.14498757249378624,
      "grad_norm": 2.2955324137983615,
      "learning_rate": 9.939118646184007e-06,
      "loss": 0.7456,
      "step": 175
    },
    {
      "epoch": 0.1491300745650373,
      "grad_norm": 2.5048442022915363,
      "learning_rate": 9.927351004779275e-06,
      "loss": 0.7536,
      "step": 180
    },
    {
      "epoch": 0.15327257663628832,
      "grad_norm": 2.373845911290789,
      "learning_rate": 9.914552535787122e-06,
      "loss": 0.7334,
      "step": 185
    },
    {
      "epoch": 0.15741507870753935,
      "grad_norm": 2.490700611604481,
      "learning_rate": 9.900725916714157e-06,
      "loss": 0.7385,
      "step": 190
    },
    {
      "epoch": 0.1615575807787904,
      "grad_norm": 2.4943108442217667,
      "learning_rate": 9.885874040161373e-06,
      "loss": 0.7226,
      "step": 195
    },
    {
      "epoch": 0.16570008285004142,
      "grad_norm": 2.6495649004577753,
      "learning_rate": 9.87000001321898e-06,
      "loss": 0.7309,
      "step": 200
    },
    {
      "epoch": 0.16984258492129245,
      "grad_norm": 2.24352822185874,
      "learning_rate": 9.853107156816393e-06,
      "loss": 0.724,
      "step": 205
    },
    {
      "epoch": 0.1739850869925435,
      "grad_norm": 2.577647983056561,
      "learning_rate": 9.835199005027477e-06,
      "loss": 0.7137,
      "step": 210
    },
    {
      "epoch": 0.17812758906379453,
      "grad_norm": 2.4661359633344566,
      "learning_rate": 9.816279304331202e-06,
      "loss": 0.711,
      "step": 215
    },
    {
      "epoch": 0.18227009113504558,
      "grad_norm": 2.3694444605967147,
      "learning_rate": 9.79635201282785e-06,
      "loss": 0.7101,
      "step": 220
    },
    {
      "epoch": 0.1864125932062966,
      "grad_norm": 2.198135101539898,
      "learning_rate": 9.775421299410977e-06,
      "loss": 0.6933,
      "step": 225
    },
    {
      "epoch": 0.19055509527754763,
      "grad_norm": 2.3950158836366224,
      "learning_rate": 9.753491542895237e-06,
      "loss": 0.707,
      "step": 230
    },
    {
      "epoch": 0.19469759734879868,
      "grad_norm": 2.443771290836774,
      "learning_rate": 9.730567331100333e-06,
      "loss": 0.693,
      "step": 235
    },
    {
      "epoch": 0.1988400994200497,
      "grad_norm": 2.69404610606291,
      "learning_rate": 9.706653459891207e-06,
      "loss": 0.6823,
      "step": 240
    },
    {
      "epoch": 0.20298260149130073,
      "grad_norm": 2.6424681261577265,
      "learning_rate": 9.681754932174719e-06,
      "loss": 0.6846,
      "step": 245
    },
    {
      "epoch": 0.2071251035625518,
      "grad_norm": 2.3720791178993106,
      "learning_rate": 9.655876956853025e-06,
      "loss": 0.6829,
      "step": 250
    },
    {
      "epoch": 0.2112676056338028,
      "grad_norm": 2.246578155524963,
      "learning_rate": 9.629024947733836e-06,
      "loss": 0.6802,
      "step": 255
    },
    {
      "epoch": 0.21541010770505387,
      "grad_norm": 2.532693053271893,
      "learning_rate": 9.601204522397826e-06,
      "loss": 0.6811,
      "step": 260
    },
    {
      "epoch": 0.2195526097763049,
      "grad_norm": 2.3168391897960996,
      "learning_rate": 9.572421501023403e-06,
      "loss": 0.6737,
      "step": 265
    },
    {
      "epoch": 0.22369511184755592,
      "grad_norm": 2.2740027059012102,
      "learning_rate": 9.5426819051691e-06,
      "loss": 0.6762,
      "step": 270
    },
    {
      "epoch": 0.22783761391880697,
      "grad_norm": 2.453598814349311,
      "learning_rate": 9.511991956513828e-06,
      "loss": 0.6689,
      "step": 275
    },
    {
      "epoch": 0.231980115990058,
      "grad_norm": 2.6061851377315857,
      "learning_rate": 9.480358075555278e-06,
      "loss": 0.6523,
      "step": 280
    },
    {
      "epoch": 0.23612261806130902,
      "grad_norm": 2.3350040791986517,
      "learning_rate": 9.447786880266706e-06,
      "loss": 0.6626,
      "step": 285
    },
    {
      "epoch": 0.24026512013256007,
      "grad_norm": 2.16738386427134,
      "learning_rate": 9.414285184712432e-06,
      "loss": 0.6526,
      "step": 290
    },
    {
      "epoch": 0.2444076222038111,
      "grad_norm": 2.353502989995007,
      "learning_rate": 9.37985999762229e-06,
      "loss": 0.6503,
      "step": 295
    },
    {
      "epoch": 0.24855012427506215,
      "grad_norm": 2.559615826375987,
      "learning_rate": 9.344518520925377e-06,
      "loss": 0.6409,
      "step": 300
    },
    {
      "epoch": 0.2526926263463132,
      "grad_norm": 2.2923285566494296,
      "learning_rate": 9.308268148243355e-06,
      "loss": 0.628,
      "step": 305
    },
    {
      "epoch": 0.25683512841756423,
      "grad_norm": 2.20361635294026,
      "learning_rate": 9.271116463343692e-06,
      "loss": 0.6401,
      "step": 310
    },
    {
      "epoch": 0.2609776304888152,
      "grad_norm": 2.3077464669006966,
      "learning_rate": 9.23307123855307e-06,
      "loss": 0.6322,
      "step": 315
    },
    {
      "epoch": 0.2651201325600663,
      "grad_norm": 2.36438437607611,
      "learning_rate": 9.194140433131397e-06,
      "loss": 0.6122,
      "step": 320
    },
    {
      "epoch": 0.26926263463131733,
      "grad_norm": 2.3361508013893255,
      "learning_rate": 9.154332191606671e-06,
      "loss": 0.6279,
      "step": 325
    },
    {
      "epoch": 0.27340513670256833,
      "grad_norm": 2.3055892316817324,
      "learning_rate": 9.113654842071114e-06,
      "loss": 0.6136,
      "step": 330
    },
    {
      "epoch": 0.2775476387738194,
      "grad_norm": 2.150656102185965,
      "learning_rate": 9.072116894438885e-06,
      "loss": 0.6132,
      "step": 335
    },
    {
      "epoch": 0.28169014084507044,
      "grad_norm": 2.1980462175987707,
      "learning_rate": 9.029727038665765e-06,
      "loss": 0.6238,
      "step": 340
    },
    {
      "epoch": 0.28583264291632143,
      "grad_norm": 2.1777751804898915,
      "learning_rate": 8.986494142931168e-06,
      "loss": 0.6065,
      "step": 345
    },
    {
      "epoch": 0.2899751449875725,
      "grad_norm": 2.33402398930558,
      "learning_rate": 8.94242725178288e-06,
      "loss": 0.6107,
      "step": 350
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 2.2919553245717306,
      "learning_rate": 8.89753558424488e-06,
      "loss": 0.6065,
      "step": 355
    },
    {
      "epoch": 0.2982601491300746,
      "grad_norm": 2.421470706741744,
      "learning_rate": 8.851828531888692e-06,
      "loss": 0.6123,
      "step": 360
    },
    {
      "epoch": 0.3024026512013256,
      "grad_norm": 2.226227311801291,
      "learning_rate": 8.805315656868587e-06,
      "loss": 0.614,
      "step": 365
    },
    {
      "epoch": 0.30654515327257664,
      "grad_norm": 2.2094485446668233,
      "learning_rate": 8.75800668992117e-06,
      "loss": 0.5961,
      "step": 370
    },
    {
      "epoch": 0.3106876553438277,
      "grad_norm": 2.172339243964824,
      "learning_rate": 8.709911528329623e-06,
      "loss": 0.5977,
      "step": 375
    },
    {
      "epoch": 0.3148301574150787,
      "grad_norm": 2.202510437920946,
      "learning_rate": 8.661040233853166e-06,
      "loss": 0.5913,
      "step": 380
    },
    {
      "epoch": 0.31897265948632975,
      "grad_norm": 2.4240297905693535,
      "learning_rate": 8.611403030622074e-06,
      "loss": 0.5885,
      "step": 385
    },
    {
      "epoch": 0.3231151615575808,
      "grad_norm": 2.3295900029200447,
      "learning_rate": 8.561010302998734e-06,
      "loss": 0.5754,
      "step": 390
    },
    {
      "epoch": 0.3272576636288318,
      "grad_norm": 2.396863934008288,
      "learning_rate": 8.509872593405189e-06,
      "loss": 0.5852,
      "step": 395
    },
    {
      "epoch": 0.33140016570008285,
      "grad_norm": 2.292182476322964,
      "learning_rate": 8.458000600117604e-06,
      "loss": 0.5747,
      "step": 400
    },
    {
      "epoch": 0.3355426677713339,
      "grad_norm": 2.317074544699955,
      "learning_rate": 8.40540517502813e-06,
      "loss": 0.5719,
      "step": 405
    },
    {
      "epoch": 0.3396851698425849,
      "grad_norm": 2.1839364363051357,
      "learning_rate": 8.35209732137463e-06,
      "loss": 0.5718,
      "step": 410
    },
    {
      "epoch": 0.34382767191383595,
      "grad_norm": 2.252030122787996,
      "learning_rate": 8.298088191438753e-06,
      "loss": 0.5697,
      "step": 415
    },
    {
      "epoch": 0.347970173985087,
      "grad_norm": 2.1248298616099475,
      "learning_rate": 8.243389084212808e-06,
      "loss": 0.5523,
      "step": 420
    },
    {
      "epoch": 0.352112676056338,
      "grad_norm": 2.173293611964785,
      "learning_rate": 8.188011443035962e-06,
      "loss": 0.5447,
      "step": 425
    },
    {
      "epoch": 0.35625517812758906,
      "grad_norm": 2.1954256707470075,
      "learning_rate": 8.131966853200226e-06,
      "loss": 0.549,
      "step": 430
    },
    {
      "epoch": 0.3603976801988401,
      "grad_norm": 2.149878633517254,
      "learning_rate": 8.075267039526764e-06,
      "loss": 0.5499,
      "step": 435
    },
    {
      "epoch": 0.36454018227009116,
      "grad_norm": 2.4205240164300315,
      "learning_rate": 8.017923863912989e-06,
      "loss": 0.5575,
      "step": 440
    },
    {
      "epoch": 0.36868268434134216,
      "grad_norm": 2.0890174160238737,
      "learning_rate": 7.959949322850994e-06,
      "loss": 0.5453,
      "step": 445
    },
    {
      "epoch": 0.3728251864125932,
      "grad_norm": 2.0946555310239887,
      "learning_rate": 7.901355544917827e-06,
      "loss": 0.5414,
      "step": 450
    },
    {
      "epoch": 0.37696768848384427,
      "grad_norm": 2.126619902644704,
      "learning_rate": 7.842154788238124e-06,
      "loss": 0.5495,
      "step": 455
    },
    {
      "epoch": 0.38111019055509526,
      "grad_norm": 2.2360988935530033,
      "learning_rate": 7.782359437919644e-06,
      "loss": 0.5438,
      "step": 460
    },
    {
      "epoch": 0.3852526926263463,
      "grad_norm": 2.1459033718890077,
      "learning_rate": 7.721982003462255e-06,
      "loss": 0.5353,
      "step": 465
    },
    {
      "epoch": 0.38939519469759737,
      "grad_norm": 2.283786387893568,
      "learning_rate": 7.661035116140856e-06,
      "loss": 0.5357,
      "step": 470
    },
    {
      "epoch": 0.39353769676884837,
      "grad_norm": 2.1816854698567,
      "learning_rate": 7.599531526362873e-06,
      "loss": 0.5449,
      "step": 475
    },
    {
      "epoch": 0.3976801988400994,
      "grad_norm": 2.2069133172922255,
      "learning_rate": 7.537484101000787e-06,
      "loss": 0.526,
      "step": 480
    },
    {
      "epoch": 0.40182270091135047,
      "grad_norm": 2.118219362380726,
      "learning_rate": 7.474905820700334e-06,
      "loss": 0.534,
      "step": 485
    },
    {
      "epoch": 0.40596520298260147,
      "grad_norm": 2.264878177122059,
      "learning_rate": 7.411809777164873e-06,
      "loss": 0.5165,
      "step": 490
    },
    {
      "epoch": 0.4101077050538525,
      "grad_norm": 2.1705389482990363,
      "learning_rate": 7.3482091704165405e-06,
      "loss": 0.5245,
      "step": 495
    },
    {
      "epoch": 0.4142502071251036,
      "grad_norm": 2.2941159685489336,
      "learning_rate": 7.284117306034733e-06,
      "loss": 0.5103,
      "step": 500
    },
    {
      "epoch": 0.4183927091963546,
      "grad_norm": 2.2324713585602254,
      "learning_rate": 7.219547592372512e-06,
      "loss": 0.5214,
      "step": 505
    },
    {
      "epoch": 0.4225352112676056,
      "grad_norm": 2.2391139429721236,
      "learning_rate": 7.15451353775151e-06,
      "loss": 0.5195,
      "step": 510
    },
    {
      "epoch": 0.4266777133388567,
      "grad_norm": 2.2449960646697513,
      "learning_rate": 7.089028747635908e-06,
      "loss": 0.4994,
      "step": 515
    },
    {
      "epoch": 0.43082021541010773,
      "grad_norm": 2.4452540985802687,
      "learning_rate": 7.023106921786118e-06,
      "loss": 0.5079,
      "step": 520
    },
    {
      "epoch": 0.43496271748135873,
      "grad_norm": 2.1476468599114065,
      "learning_rate": 6.956761851392706e-06,
      "loss": 0.5122,
      "step": 525
    },
    {
      "epoch": 0.4391052195526098,
      "grad_norm": 2.216754671617839,
      "learning_rate": 6.890007416191209e-06,
      "loss": 0.508,
      "step": 530
    },
    {
      "epoch": 0.44324772162386084,
      "grad_norm": 2.0655580498022537,
      "learning_rate": 6.822857581558423e-06,
      "loss": 0.5005,
      "step": 535
    },
    {
      "epoch": 0.44739022369511183,
      "grad_norm": 2.1974636316633975,
      "learning_rate": 6.7553263955907755e-06,
      "loss": 0.4997,
      "step": 540
    },
    {
      "epoch": 0.4515327257663629,
      "grad_norm": 2.3612483703811127,
      "learning_rate": 6.687427986165379e-06,
      "loss": 0.4912,
      "step": 545
    },
    {
      "epoch": 0.45567522783761394,
      "grad_norm": 2.1051086338026055,
      "learning_rate": 6.6191765579844205e-06,
      "loss": 0.4958,
      "step": 550
    },
    {
      "epoch": 0.45981772990886494,
      "grad_norm": 2.1903547319110013,
      "learning_rate": 6.550586389603451e-06,
      "loss": 0.4876,
      "step": 555
    },
    {
      "epoch": 0.463960231980116,
      "grad_norm": 2.2727306239643523,
      "learning_rate": 6.481671830444243e-06,
      "loss": 0.4843,
      "step": 560
    },
    {
      "epoch": 0.46810273405136704,
      "grad_norm": 2.111227587642084,
      "learning_rate": 6.412447297792818e-06,
      "loss": 0.4716,
      "step": 565
    },
    {
      "epoch": 0.47224523612261804,
      "grad_norm": 2.205352360925359,
      "learning_rate": 6.3429272737832726e-06,
      "loss": 0.4784,
      "step": 570
    },
    {
      "epoch": 0.4763877381938691,
      "grad_norm": 2.16751009903555,
      "learning_rate": 6.273126302368037e-06,
      "loss": 0.487,
      "step": 575
    },
    {
      "epoch": 0.48053024026512015,
      "grad_norm": 2.1849501403147404,
      "learning_rate": 6.203058986275207e-06,
      "loss": 0.4727,
      "step": 580
    },
    {
      "epoch": 0.48467274233637114,
      "grad_norm": 2.2295368151988284,
      "learning_rate": 6.132739983953579e-06,
      "loss": 0.4687,
      "step": 585
    },
    {
      "epoch": 0.4888152444076222,
      "grad_norm": 2.101227396044922,
      "learning_rate": 6.062184006506027e-06,
      "loss": 0.4775,
      "step": 590
    },
    {
      "epoch": 0.49295774647887325,
      "grad_norm": 2.248193658880927,
      "learning_rate": 5.991405814611855e-06,
      "loss": 0.4665,
      "step": 595
    },
    {
      "epoch": 0.4971002485501243,
      "grad_norm": 2.2704834915264662,
      "learning_rate": 5.920420215438794e-06,
      "loss": 0.4789,
      "step": 600
    },
    {
      "epoch": 0.5012427506213754,
      "grad_norm": 2.32381879564274,
      "learning_rate": 5.849242059545259e-06,
      "loss": 0.4697,
      "step": 605
    },
    {
      "epoch": 0.5053852526926264,
      "grad_norm": 2.2576290726260373,
      "learning_rate": 5.777886237773542e-06,
      "loss": 0.4593,
      "step": 610
    },
    {
      "epoch": 0.5095277547638773,
      "grad_norm": 2.135648372066762,
      "learning_rate": 5.706367678134562e-06,
      "loss": 0.4668,
      "step": 615
    },
    {
      "epoch": 0.5136702568351285,
      "grad_norm": 2.07573918775664,
      "learning_rate": 5.634701342684852e-06,
      "loss": 0.4632,
      "step": 620
    },
    {
      "epoch": 0.5178127589063795,
      "grad_norm": 2.1248245211465244,
      "learning_rate": 5.562902224396416e-06,
      "loss": 0.4595,
      "step": 625
    },
    {
      "epoch": 0.5219552609776305,
      "grad_norm": 2.130098034424365,
      "learning_rate": 5.49098534402012e-06,
      "loss": 0.4598,
      "step": 630
    },
    {
      "epoch": 0.5260977630488816,
      "grad_norm": 2.334633169152005,
      "learning_rate": 5.418965746943281e-06,
      "loss": 0.4559,
      "step": 635
    },
    {
      "epoch": 0.5302402651201326,
      "grad_norm": 2.1778621060776193,
      "learning_rate": 5.34685850004208e-06,
      "loss": 0.4513,
      "step": 640
    },
    {
      "epoch": 0.5343827671913836,
      "grad_norm": 2.168236740017704,
      "learning_rate": 5.2746786885295034e-06,
      "loss": 0.4556,
      "step": 645
    },
    {
      "epoch": 0.5385252692626347,
      "grad_norm": 1.993317342719062,
      "learning_rate": 5.2024414127994325e-06,
      "loss": 0.4462,
      "step": 650
    },
    {
      "epoch": 0.5426677713338857,
      "grad_norm": 2.126792629247378,
      "learning_rate": 5.13016178526756e-06,
      "loss": 0.4365,
      "step": 655
    },
    {
      "epoch": 0.5468102734051367,
      "grad_norm": 2.1107942879381727,
      "learning_rate": 5.057854927209804e-06,
      "loss": 0.4434,
      "step": 660
    },
    {
      "epoch": 0.5509527754763878,
      "grad_norm": 2.1123391684515815,
      "learning_rate": 4.985535965598843e-06,
      "loss": 0.4489,
      "step": 665
    },
    {
      "epoch": 0.5550952775476388,
      "grad_norm": 2.20383507837916,
      "learning_rate": 4.913220029939491e-06,
      "loss": 0.4426,
      "step": 670
    },
    {
      "epoch": 0.5592377796188898,
      "grad_norm": 2.024690328428114,
      "learning_rate": 4.840922249103506e-06,
      "loss": 0.4372,
      "step": 675
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 2.1658026493211713,
      "learning_rate": 4.7686577481645745e-06,
      "loss": 0.4344,
      "step": 680
    },
    {
      "epoch": 0.5675227837613919,
      "grad_norm": 2.130458226840472,
      "learning_rate": 4.696441645234042e-06,
      "loss": 0.4423,
      "step": 685
    },
    {
      "epoch": 0.5716652858326429,
      "grad_norm": 2.130335330283906,
      "learning_rate": 4.624289048298147e-06,
      "loss": 0.4324,
      "step": 690
    },
    {
      "epoch": 0.575807787903894,
      "grad_norm": 2.0262507094150077,
      "learning_rate": 4.55221505205734e-06,
      "loss": 0.4338,
      "step": 695
    },
    {
      "epoch": 0.579950289975145,
      "grad_norm": 2.0666888969605277,
      "learning_rate": 4.480234734768393e-06,
      "loss": 0.4286,
      "step": 700
    },
    {
      "epoch": 0.584092792046396,
      "grad_norm": 2.0542477917888453,
      "learning_rate": 4.408363155089952e-06,
      "loss": 0.4307,
      "step": 705
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 2.1776472324502327,
      "learning_rate": 4.3366153489321855e-06,
      "loss": 0.4344,
      "step": 710
    },
    {
      "epoch": 0.5923777961888981,
      "grad_norm": 2.0982654105842475,
      "learning_rate": 4.265006326311199e-06,
      "loss": 0.418,
      "step": 715
    },
    {
      "epoch": 0.5965202982601492,
      "grad_norm": 2.0399148247049363,
      "learning_rate": 4.1935510682088545e-06,
      "loss": 0.422,
      "step": 720
    },
    {
      "epoch": 0.6006628003314002,
      "grad_norm": 2.132392353626558,
      "learning_rate": 4.122264523438668e-06,
      "loss": 0.4122,
      "step": 725
    },
    {
      "epoch": 0.6048053024026512,
      "grad_norm": 2.0436310042803094,
      "learning_rate": 4.051161605518453e-06,
      "loss": 0.4233,
      "step": 730
    },
    {
      "epoch": 0.6089478044739023,
      "grad_norm": 2.0217278892738504,
      "learning_rate": 3.980257189550316e-06,
      "loss": 0.4226,
      "step": 735
    },
    {
      "epoch": 0.6130903065451533,
      "grad_norm": 1.8834806774575015,
      "learning_rate": 3.909566109108727e-06,
      "loss": 0.411,
      "step": 740
    },
    {
      "epoch": 0.6172328086164043,
      "grad_norm": 2.158825355814557,
      "learning_rate": 3.839103153137247e-06,
      "loss": 0.4175,
      "step": 745
    },
    {
      "epoch": 0.6213753106876554,
      "grad_norm": 2.1656769542527363,
      "learning_rate": 3.768883062854598e-06,
      "loss": 0.4178,
      "step": 750
    },
    {
      "epoch": 0.6255178127589064,
      "grad_norm": 2.028679198499099,
      "learning_rate": 3.6989205286707398e-06,
      "loss": 0.4169,
      "step": 755
    },
    {
      "epoch": 0.6296603148301574,
      "grad_norm": 2.0638901993850087,
      "learning_rate": 3.6292301871135425e-06,
      "loss": 0.4074,
      "step": 760
    },
    {
      "epoch": 0.6338028169014085,
      "grad_norm": 2.1149502198751344,
      "learning_rate": 3.55982661776676e-06,
      "loss": 0.4126,
      "step": 765
    },
    {
      "epoch": 0.6379453189726595,
      "grad_norm": 2.242329346685076,
      "learning_rate": 3.4907243402199013e-06,
      "loss": 0.4165,
      "step": 770
    },
    {
      "epoch": 0.6420878210439105,
      "grad_norm": 2.1066940525003326,
      "learning_rate": 3.4219378110306523e-06,
      "loss": 0.3997,
      "step": 775
    },
    {
      "epoch": 0.6462303231151616,
      "grad_norm": 2.169707690258044,
      "learning_rate": 3.353481420700495e-06,
      "loss": 0.4104,
      "step": 780
    },
    {
      "epoch": 0.6503728251864126,
      "grad_norm": 2.098473714909255,
      "learning_rate": 3.285369490664133e-06,
      "loss": 0.3956,
      "step": 785
    },
    {
      "epoch": 0.6545153272576636,
      "grad_norm": 2.1657310992501295,
      "learning_rate": 3.2176162702933816e-06,
      "loss": 0.4135,
      "step": 790
    },
    {
      "epoch": 0.6586578293289147,
      "grad_norm": 2.1452143463915307,
      "learning_rate": 3.150235933916115e-06,
      "loss": 0.3934,
      "step": 795
    },
    {
      "epoch": 0.6628003314001657,
      "grad_norm": 2.052594101769282,
      "learning_rate": 3.0832425778509235e-06,
      "loss": 0.3979,
      "step": 800
    },
    {
      "epoch": 0.6669428334714167,
      "grad_norm": 2.0098945834829824,
      "learning_rate": 3.0166502174581012e-06,
      "loss": 0.3967,
      "step": 805
    },
    {
      "epoch": 0.6710853355426678,
      "grad_norm": 2.042954604946958,
      "learning_rate": 2.950472784207544e-06,
      "loss": 0.3945,
      "step": 810
    },
    {
      "epoch": 0.6752278376139188,
      "grad_norm": 2.0032403215865506,
      "learning_rate": 2.8847241227642255e-06,
      "loss": 0.3967,
      "step": 815
    },
    {
      "epoch": 0.6793703396851698,
      "grad_norm": 2.1221545047500214,
      "learning_rate": 2.819417988091814e-06,
      "loss": 0.3868,
      "step": 820
    },
    {
      "epoch": 0.6835128417564209,
      "grad_norm": 2.130884956811238,
      "learning_rate": 2.754568042575061e-06,
      "loss": 0.3922,
      "step": 825
    },
    {
      "epoch": 0.6876553438276719,
      "grad_norm": 1.9774695256683483,
      "learning_rate": 2.6901878531615677e-06,
      "loss": 0.3901,
      "step": 830
    },
    {
      "epoch": 0.6917978458989229,
      "grad_norm": 2.1120886925275055,
      "learning_rate": 2.6262908885235046e-06,
      "loss": 0.3941,
      "step": 835
    },
    {
      "epoch": 0.695940347970174,
      "grad_norm": 2.045887267583242,
      "learning_rate": 2.5628905162398797e-06,
      "loss": 0.3959,
      "step": 840
    },
    {
      "epoch": 0.700082850041425,
      "grad_norm": 1.8783748619115133,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.3806,
      "step": 845
    },
    {
      "epoch": 0.704225352112676,
      "grad_norm": 2.08098731426819,
      "learning_rate": 2.4376324968286154e-06,
      "loss": 0.387,
      "step": 850
    },
    {
      "epoch": 0.7083678541839271,
      "grad_norm": 2.1586747995562705,
      "learning_rate": 2.375801054333409e-06,
      "loss": 0.3729,
      "step": 855
    },
    {
      "epoch": 0.7125103562551781,
      "grad_norm": 2.0279143582578616,
      "learning_rate": 2.3145186079753685e-06,
      "loss": 0.3758,
      "step": 860
    },
    {
      "epoch": 0.7166528583264291,
      "grad_norm": 2.2429697814696854,
      "learning_rate": 2.253797978362617e-06,
      "loss": 0.3786,
      "step": 865
    },
    {
      "epoch": 0.7207953603976802,
      "grad_norm": 2.0360034426541556,
      "learning_rate": 2.193651868568285e-06,
      "loss": 0.377,
      "step": 870
    },
    {
      "epoch": 0.7249378624689312,
      "grad_norm": 1.9725681488817448,
      "learning_rate": 2.1340928614729445e-06,
      "loss": 0.3721,
      "step": 875
    },
    {
      "epoch": 0.7290803645401823,
      "grad_norm": 2.103021145158836,
      "learning_rate": 2.075133417132223e-06,
      "loss": 0.3719,
      "step": 880
    },
    {
      "epoch": 0.7332228666114333,
      "grad_norm": 2.092900322935167,
      "learning_rate": 2.016785870170079e-06,
      "loss": 0.3714,
      "step": 885
    },
    {
      "epoch": 0.7373653686826843,
      "grad_norm": 2.0906479452415336,
      "learning_rate": 1.9590624271983406e-06,
      "loss": 0.3749,
      "step": 890
    },
    {
      "epoch": 0.7415078707539354,
      "grad_norm": 2.096208214143563,
      "learning_rate": 1.9019751642630252e-06,
      "loss": 0.37,
      "step": 895
    },
    {
      "epoch": 0.7456503728251864,
      "grad_norm": 2.0164719015170656,
      "learning_rate": 1.8455360243179537e-06,
      "loss": 0.3724,
      "step": 900
    },
    {
      "epoch": 0.7497928748964374,
      "grad_norm": 1.9827943207162666,
      "learning_rate": 1.7897568147262323e-06,
      "loss": 0.3747,
      "step": 905
    },
    {
      "epoch": 0.7539353769676885,
      "grad_norm": 1.9263424969881737,
      "learning_rate": 1.7346492047900897e-06,
      "loss": 0.3635,
      "step": 910
    },
    {
      "epoch": 0.7580778790389395,
      "grad_norm": 2.2195829689186106,
      "learning_rate": 1.6802247233095914e-06,
      "loss": 0.3667,
      "step": 915
    },
    {
      "epoch": 0.7622203811101905,
      "grad_norm": 2.0103741607119185,
      "learning_rate": 1.626494756170765e-06,
      "loss": 0.3692,
      "step": 920
    },
    {
      "epoch": 0.7663628831814416,
      "grad_norm": 2.1097047260728963,
      "learning_rate": 1.5734705439636017e-06,
      "loss": 0.365,
      "step": 925
    },
    {
      "epoch": 0.7705053852526926,
      "grad_norm": 2.036839326608303,
      "learning_rate": 1.5211631796304721e-06,
      "loss": 0.3648,
      "step": 930
    },
    {
      "epoch": 0.7746478873239436,
      "grad_norm": 1.9414201763962056,
      "learning_rate": 1.46958360614543e-06,
      "loss": 0.3656,
      "step": 935
    },
    {
      "epoch": 0.7787903893951947,
      "grad_norm": 2.047677571212274,
      "learning_rate": 1.4187426142248723e-06,
      "loss": 0.3542,
      "step": 940
    },
    {
      "epoch": 0.7829328914664457,
      "grad_norm": 2.06103898039325,
      "learning_rate": 1.3686508400700787e-06,
      "loss": 0.3646,
      "step": 945
    },
    {
      "epoch": 0.7870753935376967,
      "grad_norm": 2.0587768499958394,
      "learning_rate": 1.3193187631420462e-06,
      "loss": 0.3579,
      "step": 950
    },
    {
      "epoch": 0.7912178956089478,
      "grad_norm": 1.9792392112511976,
      "learning_rate": 1.2707567039691505e-06,
      "loss": 0.3607,
      "step": 955
    },
    {
      "epoch": 0.7953603976801988,
      "grad_norm": 1.9865516347697185,
      "learning_rate": 1.222974821988024e-06,
      "loss": 0.3588,
      "step": 960
    },
    {
      "epoch": 0.7995028997514498,
      "grad_norm": 2.0887989420586055,
      "learning_rate": 1.1759831134181504e-06,
      "loss": 0.3604,
      "step": 965
    },
    {
      "epoch": 0.8036454018227009,
      "grad_norm": 2.1350401974441575,
      "learning_rate": 1.1297914091706086e-06,
      "loss": 0.3578,
      "step": 970
    },
    {
      "epoch": 0.8077879038939519,
      "grad_norm": 2.0023054097994417,
      "learning_rate": 1.0844093727913868e-06,
      "loss": 0.352,
      "step": 975
    },
    {
      "epoch": 0.8119304059652029,
      "grad_norm": 2.006501689438896,
      "learning_rate": 1.039846498439727e-06,
      "loss": 0.3508,
      "step": 980
    },
    {
      "epoch": 0.816072908036454,
      "grad_norm": 2.1675559938665017,
      "learning_rate": 9.961121089018933e-07,
      "loss": 0.3513,
      "step": 985
    },
    {
      "epoch": 0.820215410107705,
      "grad_norm": 1.9789546403226348,
      "learning_rate": 9.532153536407923e-07,
      "loss": 0.3548,
      "step": 990
    },
    {
      "epoch": 0.824357912178956,
      "grad_norm": 2.2136680684771215,
      "learning_rate": 9.111652068818621e-07,
      "loss": 0.3515,
      "step": 995
    },
    {
      "epoch": 0.8285004142502072,
      "grad_norm": 2.0985287351131183,
      "learning_rate": 8.699704657356195e-07,
      "loss": 0.3494,
      "step": 1000
    },
    {
      "epoch": 0.8326429163214581,
      "grad_norm": 2.03602744057189,
      "learning_rate": 8.296397483572515e-07,
      "loss": 0.3502,
      "step": 1005
    },
    {
      "epoch": 0.8367854183927091,
      "grad_norm": 1.9529410237196838,
      "learning_rate": 7.901814921436624e-07,
      "loss": 0.3412,
      "step": 1010
    },
    {
      "epoch": 0.8409279204639603,
      "grad_norm": 2.056866748380538,
      "learning_rate": 7.516039519683105e-07,
      "loss": 0.3451,
      "step": 1015
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 2.1162292783700596,
      "learning_rate": 7.139151984542636e-07,
      "loss": 0.346,
      "step": 1020
    },
    {
      "epoch": 0.8492129246064622,
      "grad_norm": 2.199566140011582,
      "learning_rate": 6.771231162857722e-07,
      "loss": 0.3503,
      "step": 1025
    },
    {
      "epoch": 0.8533554266777134,
      "grad_norm": 2.071853569310024,
      "learning_rate": 6.412354025587509e-07,
      "loss": 0.3394,
      "step": 1030
    },
    {
      "epoch": 0.8574979287489644,
      "grad_norm": 1.9821516702262831,
      "learning_rate": 6.062595651705111e-07,
      "loss": 0.3599,
      "step": 1035
    },
    {
      "epoch": 0.8616404308202155,
      "grad_norm": 2.03084885183303,
      "learning_rate": 5.722029212490666e-07,
      "loss": 0.3473,
      "step": 1040
    },
    {
      "epoch": 0.8657829328914665,
      "grad_norm": 1.9891381030542377,
      "learning_rate": 5.390725956223531e-07,
      "loss": 0.3399,
      "step": 1045
    },
    {
      "epoch": 0.8699254349627175,
      "grad_norm": 1.996858017795371,
      "learning_rate": 5.068755193276798e-07,
      "loss": 0.3438,
      "step": 1050
    },
    {
      "epoch": 0.8740679370339686,
      "grad_norm": 2.0102978075746436,
      "learning_rate": 4.756184281617121e-07,
      "loss": 0.3596,
      "step": 1055
    },
    {
      "epoch": 0.8782104391052196,
      "grad_norm": 2.096782816191255,
      "learning_rate": 4.4530786127131575e-07,
      "loss": 0.3426,
      "step": 1060
    },
    {
      "epoch": 0.8823529411764706,
      "grad_norm": 2.041538694858829,
      "learning_rate": 4.159501597855287e-07,
      "loss": 0.3469,
      "step": 1065
    },
    {
      "epoch": 0.8864954432477217,
      "grad_norm": 2.050624816044249,
      "learning_rate": 3.8755146548896784e-07,
      "loss": 0.3453,
      "step": 1070
    },
    {
      "epoch": 0.8906379453189727,
      "grad_norm": 2.134183615956234,
      "learning_rate": 3.6011771953693044e-07,
      "loss": 0.3383,
      "step": 1075
    },
    {
      "epoch": 0.8947804473902237,
      "grad_norm": 2.0340883063292456,
      "learning_rate": 3.336546612124758e-07,
      "loss": 0.3394,
      "step": 1080
    },
    {
      "epoch": 0.8989229494614748,
      "grad_norm": 2.142834891831593,
      "learning_rate": 3.081678267257404e-07,
      "loss": 0.3377,
      "step": 1085
    },
    {
      "epoch": 0.9030654515327258,
      "grad_norm": 2.1312482485517608,
      "learning_rate": 2.836625480557265e-07,
      "loss": 0.3347,
      "step": 1090
    },
    {
      "epoch": 0.9072079536039768,
      "grad_norm": 2.076260403631579,
      "learning_rate": 2.601439518348331e-07,
      "loss": 0.3408,
      "step": 1095
    },
    {
      "epoch": 0.9113504556752279,
      "grad_norm": 2.063238043755945,
      "learning_rate": 2.376169582763288e-07,
      "loss": 0.347,
      "step": 1100
    },
    {
      "epoch": 0.9154929577464789,
      "grad_norm": 2.045149868081977,
      "learning_rate": 2.1608628014502364e-07,
      "loss": 0.338,
      "step": 1105
    },
    {
      "epoch": 0.9196354598177299,
      "grad_norm": 2.0728146587926535,
      "learning_rate": 1.955564217713335e-07,
      "loss": 0.3385,
      "step": 1110
    },
    {
      "epoch": 0.923777961888981,
      "grad_norm": 1.980028386143511,
      "learning_rate": 1.7603167810894662e-07,
      "loss": 0.3397,
      "step": 1115
    },
    {
      "epoch": 0.927920463960232,
      "grad_norm": 2.040384287235922,
      "learning_rate": 1.5751613383630128e-07,
      "loss": 0.337,
      "step": 1120
    },
    {
      "epoch": 0.932062966031483,
      "grad_norm": 2.0668331131661337,
      "learning_rate": 1.4001366250204762e-07,
      "loss": 0.3432,
      "step": 1125
    },
    {
      "epoch": 0.9362054681027341,
      "grad_norm": 2.1215696014539636,
      "learning_rate": 1.235279257146804e-07,
      "loss": 0.348,
      "step": 1130
    },
    {
      "epoch": 0.9403479701739851,
      "grad_norm": 2.0968064514077187,
      "learning_rate": 1.080623723765134e-07,
      "loss": 0.3351,
      "step": 1135
    },
    {
      "epoch": 0.9444904722452361,
      "grad_norm": 2.0386634215167305,
      "learning_rate": 9.362023796215036e-08,
      "loss": 0.335,
      "step": 1140
    },
    {
      "epoch": 0.9486329743164872,
      "grad_norm": 2.081961716266303,
      "learning_rate": 8.020454384160437e-08,
      "loss": 0.345,
      "step": 1145
    },
    {
      "epoch": 0.9527754763877382,
      "grad_norm": 2.0627875473620443,
      "learning_rate": 6.78180966482156e-08,
      "loss": 0.3367,
      "step": 1150
    },
    {
      "epoch": 0.9569179784589892,
      "grad_norm": 2.0611893408248587,
      "learning_rate": 5.646348769148491e-08,
      "loss": 0.3388,
      "step": 1155
    },
    {
      "epoch": 0.9610604805302403,
      "grad_norm": 2.1119589491438315,
      "learning_rate": 4.6143092414961396e-08,
      "loss": 0.3406,
      "step": 1160
    },
    {
      "epoch": 0.9652029826014913,
      "grad_norm": 2.0229820894857866,
      "learning_rate": 3.685906989928656e-08,
      "loss": 0.3446,
      "step": 1165
    },
    {
      "epoch": 0.9693454846727423,
      "grad_norm": 1.9950176331137996,
      "learning_rate": 2.861336241050061e-08,
      "loss": 0.3361,
      "step": 1170
    },
    {
      "epoch": 0.9734879867439934,
      "grad_norm": 2.051002382020688,
      "learning_rate": 2.1407694993714755e-08,
      "loss": 0.3352,
      "step": 1175
    },
    {
      "epoch": 0.9776304888152444,
      "grad_norm": 2.0952467848619967,
      "learning_rate": 1.5243575112218744e-08,
      "loss": 0.3278,
      "step": 1180
    },
    {
      "epoch": 0.9817729908864954,
      "grad_norm": 2.1473598929894164,
      "learning_rate": 1.0122292332114814e-08,
      "loss": 0.3386,
      "step": 1185
    },
    {
      "epoch": 0.9859154929577465,
      "grad_norm": 2.027802609236831,
      "learning_rate": 6.044918052531268e-09,
      "loss": 0.3429,
      "step": 1190
    },
    {
      "epoch": 0.9900579950289975,
      "grad_norm": 2.088880428637403,
      "learning_rate": 3.0123052814812203e-09,
      "loss": 0.3452,
      "step": 1195
    },
    {
      "epoch": 0.9942004971002486,
      "grad_norm": 2.1283431853256705,
      "learning_rate": 1.025088457409229e-09,
      "loss": 0.3321,
      "step": 1200
    },
    {
      "epoch": 0.9983429991714996,
      "grad_norm": 1.953227804218852,
      "learning_rate": 8.368331646302353e-11,
      "loss": 0.3403,
      "step": 1205
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.33688703179359436,
      "eval_runtime": 1.1942,
      "eval_samples_per_second": 2.512,
      "eval_steps_per_second": 0.837,
      "step": 1207
    },
    {
      "epoch": 1.0,
      "step": 1207,
      "total_flos": 252668899491840.0,
      "train_loss": 0.5252359404283411,
      "train_runtime": 27071.2973,
      "train_samples_per_second": 1.427,
      "train_steps_per_second": 0.045
    }
  ],
  "logging_steps": 5,
  "max_steps": 1207,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 252668899491840.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}