{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 598,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016722408026755853,
      "grad_norm": 108.1450424194336,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 23.7219,
      "step": 1
    },
    {
      "epoch": 0.008361204013377926,
      "grad_norm": 80.87652587890625,
      "learning_rate": 2.4999999999999998e-05,
      "loss": 22.4468,
      "step": 5
    },
    {
      "epoch": 0.016722408026755852,
      "grad_norm": 25.19646644592285,
      "learning_rate": 4.9999999999999996e-05,
      "loss": 18.8421,
      "step": 10
    },
    {
      "epoch": 0.02508361204013378,
      "grad_norm": 7.7148966789245605,
      "learning_rate": 7.5e-05,
      "loss": 15.947,
      "step": 15
    },
    {
      "epoch": 0.033444816053511704,
      "grad_norm": 6.524266719818115,
      "learning_rate": 9.999999999999999e-05,
      "loss": 14.4621,
      "step": 20
    },
    {
      "epoch": 0.04180602006688963,
      "grad_norm": 3.7445762157440186,
      "learning_rate": 0.000125,
      "loss": 13.5317,
      "step": 25
    },
    {
      "epoch": 0.05016722408026756,
      "grad_norm": 2.5251731872558594,
      "learning_rate": 0.00015,
      "loss": 13.0802,
      "step": 30
    },
    {
      "epoch": 0.05852842809364549,
      "grad_norm": 3.446436882019043,
      "learning_rate": 0.000175,
      "loss": 12.5052,
      "step": 35
    },
    {
      "epoch": 0.06688963210702341,
      "grad_norm": 5.8851447105407715,
      "learning_rate": 0.00019999999999999998,
      "loss": 11.5731,
      "step": 40
    },
    {
      "epoch": 0.07525083612040134,
      "grad_norm": 11.619170188903809,
      "learning_rate": 0.000225,
      "loss": 9.873,
      "step": 45
    },
    {
      "epoch": 0.08361204013377926,
      "grad_norm": 20.056072235107422,
      "learning_rate": 0.00025,
      "loss": 7.4632,
      "step": 50
    },
    {
      "epoch": 0.09197324414715718,
      "grad_norm": 9.337392807006836,
      "learning_rate": 0.00027499999999999996,
      "loss": 3.5644,
      "step": 55
    },
    {
      "epoch": 0.10033444816053512,
      "grad_norm": 5.715840816497803,
      "learning_rate": 0.0003,
      "loss": 2.2307,
      "step": 60
    },
    {
      "epoch": 0.10869565217391304,
      "grad_norm": 3.3433170318603516,
      "learning_rate": 0.0002999360698660697,
      "loss": 1.735,
      "step": 65
    },
    {
      "epoch": 0.11705685618729098,
      "grad_norm": 1.1295098066329956,
      "learning_rate": 0.0002997443339584391,
      "loss": 1.4312,
      "step": 70
    },
    {
      "epoch": 0.1254180602006689,
      "grad_norm": 1.1192564964294434,
      "learning_rate": 0.0002994249557131383,
      "loss": 1.26,
      "step": 75
    },
    {
      "epoch": 0.13377926421404682,
      "grad_norm": 0.5016701817512512,
      "learning_rate": 0.00029897820736875393,
      "loss": 1.18,
      "step": 80
    },
    {
      "epoch": 0.14214046822742474,
      "grad_norm": 1.4025403261184692,
      "learning_rate": 0.0002984044697343724,
      "loss": 1.1377,
      "step": 85
    },
    {
      "epoch": 0.1505016722408027,
      "grad_norm": 0.6089034080505371,
      "learning_rate": 0.00029770423186497794,
      "loss": 1.0823,
      "step": 90
    },
    {
      "epoch": 0.1588628762541806,
      "grad_norm": 0.8872767090797424,
      "learning_rate": 0.0002968780906445808,
      "loss": 1.0462,
      "step": 95
    },
    {
      "epoch": 0.16722408026755853,
      "grad_norm": 1.0498641729354858,
      "learning_rate": 0.00029592675027743253,
      "loss": 1.0203,
      "step": 100
    },
    {
      "epoch": 0.17558528428093645,
      "grad_norm": 1.5126526355743408,
      "learning_rate": 0.00029485102168776096,
      "loss": 1.0039,
      "step": 105
    },
    {
      "epoch": 0.18394648829431437,
      "grad_norm": 1.2154971361160278,
      "learning_rate": 0.0002936518218285368,
      "loss": 0.9897,
      "step": 110
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.8289320468902588,
      "learning_rate": 0.0002923301728998616,
      "loss": 0.9631,
      "step": 115
    },
    {
      "epoch": 0.20066889632107024,
      "grad_norm": 1.4787485599517822,
      "learning_rate": 0.0002908872014776423,
      "loss": 0.9517,
      "step": 120
    },
    {
      "epoch": 0.20903010033444816,
      "grad_norm": 1.2933125495910645,
      "learning_rate": 0.0002893241375532959,
      "loss": 0.9611,
      "step": 125
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 1.0930614471435547,
      "learning_rate": 0.00028764231348530267,
      "loss": 0.9246,
      "step": 130
    },
    {
      "epoch": 0.225752508361204,
      "grad_norm": 1.1177003383636475,
      "learning_rate": 0.00028584316286350164,
      "loss": 0.9513,
      "step": 135
    },
    {
      "epoch": 0.23411371237458195,
      "grad_norm": 1.164829969406128,
      "learning_rate": 0.0002839282192870955,
      "loss": 0.9224,
      "step": 140
    },
    {
      "epoch": 0.24247491638795987,
      "grad_norm": 0.5613699555397034,
      "learning_rate": 0.00028189911505740843,
      "loss": 0.9142,
      "step": 145
    },
    {
      "epoch": 0.2508361204013378,
      "grad_norm": 0.8771304488182068,
      "learning_rate": 0.0002797575797865093,
      "loss": 0.9032,
      "step": 150
    },
    {
      "epoch": 0.2591973244147157,
      "grad_norm": 1.156224250793457,
      "learning_rate": 0.0002775054389228871,
      "loss": 0.8969,
      "step": 155
    },
    {
      "epoch": 0.26755852842809363,
      "grad_norm": 1.482100486755371,
      "learning_rate": 0.0002751446121954359,
      "loss": 0.8972,
      "step": 160
    },
    {
      "epoch": 0.27591973244147155,
      "grad_norm": 1.0462350845336914,
      "learning_rate": 0.00027267711197707386,
      "loss": 0.8902,
      "step": 165
    },
    {
      "epoch": 0.2842809364548495,
      "grad_norm": 0.8120772838592529,
      "learning_rate": 0.0002701050415693934,
      "loss": 0.8766,
      "step": 170
    },
    {
      "epoch": 0.29264214046822745,
      "grad_norm": 0.8219690322875977,
      "learning_rate": 0.0002674305934098032,
      "loss": 0.8838,
      "step": 175
    },
    {
      "epoch": 0.3010033444816054,
      "grad_norm": 0.643622875213623,
      "learning_rate": 0.00026465604720269026,
      "loss": 0.882,
      "step": 180
    },
    {
      "epoch": 0.3093645484949833,
      "grad_norm": 1.0544142723083496,
      "learning_rate": 0.0002617837679761962,
      "loss": 0.8643,
      "step": 185
    },
    {
      "epoch": 0.3177257525083612,
      "grad_norm": 0.718562126159668,
      "learning_rate": 0.0002588162040662628,
      "loss": 0.8456,
      "step": 190
    },
    {
      "epoch": 0.32608695652173914,
      "grad_norm": 1.0501580238342285,
      "learning_rate": 0.0002557558850296661,
      "loss": 0.866,
      "step": 195
    },
    {
      "epoch": 0.33444816053511706,
      "grad_norm": 1.50191330909729,
      "learning_rate": 0.00025260541948781784,
      "loss": 0.8682,
      "step": 200
    },
    {
      "epoch": 0.342809364548495,
      "grad_norm": 2.241793394088745,
      "learning_rate": 0.00024936749290317196,
      "loss": 0.858,
      "step": 205
    },
    {
      "epoch": 0.3511705685618729,
      "grad_norm": 0.8153356313705444,
      "learning_rate": 0.00024604486529013106,
      "loss": 0.8508,
      "step": 210
    },
    {
      "epoch": 0.3595317725752508,
      "grad_norm": 1.1809695959091187,
      "learning_rate": 0.00024264036886240603,
      "loss": 0.845,
      "step": 215
    },
    {
      "epoch": 0.36789297658862874,
      "grad_norm": 0.45723193883895874,
      "learning_rate": 0.00023915690561883125,
      "loss": 0.8446,
      "step": 220
    },
    {
      "epoch": 0.3762541806020067,
      "grad_norm": 1.4294965267181396,
      "learning_rate": 0.00023559744486969622,
      "loss": 0.8342,
      "step": 225
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.41016700863838196,
      "learning_rate": 0.00023196502070569963,
      "loss": 0.8355,
      "step": 230
    },
    {
      "epoch": 0.39297658862876256,
      "grad_norm": 0.39777177572250366,
      "learning_rate": 0.00022826272941168556,
      "loss": 0.8328,
      "step": 235
    },
    {
      "epoch": 0.4013377926421405,
      "grad_norm": 0.9254685044288635,
      "learning_rate": 0.00022449372682736428,
      "loss": 0.8285,
      "step": 240
    },
    {
      "epoch": 0.4096989966555184,
      "grad_norm": 0.9923207759857178,
      "learning_rate": 0.00022066122565726914,
      "loss": 0.8205,
      "step": 245
    },
    {
      "epoch": 0.4180602006688963,
      "grad_norm": 1.5853259563446045,
      "learning_rate": 0.00021676849273224132,
      "loss": 0.8431,
      "step": 250
    },
    {
      "epoch": 0.42642140468227424,
      "grad_norm": 0.5669292211532593,
      "learning_rate": 0.00021281884622477757,
      "loss": 0.828,
      "step": 255
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 0.7299262285232544,
      "learning_rate": 0.00020881565282061386,
      "loss": 0.82,
      "step": 260
    },
    {
      "epoch": 0.4431438127090301,
      "grad_norm": 2.6832640171051025,
      "learning_rate": 0.00020476232484895653,
      "loss": 0.8197,
      "step": 265
    },
    {
      "epoch": 0.451505016722408,
      "grad_norm": 1.040622591972351,
      "learning_rate": 0.00020066231737380684,
      "loss": 0.8163,
      "step": 270
    },
    {
      "epoch": 0.459866220735786,
      "grad_norm": 0.9296326637268066,
      "learning_rate": 0.00019651912524885814,
      "loss": 0.8207,
      "step": 275
    },
    {
      "epoch": 0.4682274247491639,
      "grad_norm": 1.341401219367981,
      "learning_rate": 0.00019233628013847636,
      "loss": 0.7954,
      "step": 280
    },
    {
      "epoch": 0.4765886287625418,
      "grad_norm": 1.4546632766723633,
      "learning_rate": 0.00018811734750730306,
      "loss": 0.8087,
      "step": 285
    },
    {
      "epoch": 0.48494983277591974,
      "grad_norm": 2.639110803604126,
      "learning_rate": 0.000183865923581047,
      "loss": 0.82,
      "step": 290
    },
    {
      "epoch": 0.49331103678929766,
      "grad_norm": 1.9307732582092285,
      "learning_rate": 0.00017958563228105473,
      "loss": 0.8261,
      "step": 295
    },
    {
      "epoch": 0.5016722408026756,
      "grad_norm": 0.8820024132728577,
      "learning_rate": 0.000175280122135274,
      "loss": 0.8188,
      "step": 300
    },
    {
      "epoch": 0.5100334448160535,
      "grad_norm": 1.1421868801116943,
      "learning_rate": 0.00017095306316824145,
      "loss": 0.808,
      "step": 305
    },
    {
      "epoch": 0.5183946488294314,
      "grad_norm": 1.443213701248169,
      "learning_rate": 0.00016660814377274764,
      "loss": 0.8246,
      "step": 310
    },
    {
      "epoch": 0.5267558528428093,
      "grad_norm": 1.0917268991470337,
      "learning_rate": 0.00016224906756584414,
      "loss": 0.8132,
      "step": 315
    },
    {
      "epoch": 0.5351170568561873,
      "grad_norm": 1.4697446823120117,
      "learning_rate": 0.00015787955023187394,
      "loss": 0.7961,
      "step": 320
    },
    {
      "epoch": 0.5434782608695652,
      "grad_norm": 0.5695840716362,
      "learning_rate": 0.00015350331635521528,
      "loss": 0.8022,
      "step": 325
    },
    {
      "epoch": 0.5518394648829431,
      "grad_norm": 0.5283718705177307,
      "learning_rate": 0.00014912409624543942,
      "loss": 0.8126,
      "step": 330
    },
    {
      "epoch": 0.560200668896321,
      "grad_norm": 0.604545533657074,
      "learning_rate": 0.000144745622757588,
      "loss": 0.8025,
      "step": 335
    },
    {
      "epoch": 0.568561872909699,
      "grad_norm": 0.5798037648200989,
      "learning_rate": 0.00014037162811028105,
      "loss": 0.7849,
      "step": 340
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.8034297227859497,
      "learning_rate": 0.00013600584070436663,
      "loss": 0.7902,
      "step": 345
    },
    {
      "epoch": 0.5852842809364549,
      "grad_norm": 0.454662948846817,
      "learning_rate": 0.0001316519819448257,
      "loss": 0.79,
      "step": 350
    },
    {
      "epoch": 0.5936454849498328,
      "grad_norm": 0.8804046511650085,
      "learning_rate": 0.0001273137630686397,
      "loss": 0.7963,
      "step": 355
    },
    {
      "epoch": 0.6020066889632107,
      "grad_norm": 0.7838584780693054,
      "learning_rate": 0.0001229948819813257,
      "loss": 0.7967,
      "step": 360
    },
    {
      "epoch": 0.6103678929765887,
      "grad_norm": 1.194453477859497,
      "learning_rate": 0.00011869902010483487,
      "loss": 0.7879,
      "step": 365
    },
    {
      "epoch": 0.6187290969899666,
      "grad_norm": 0.972303032875061,
      "learning_rate": 0.00011442983923950197,
      "loss": 0.8019,
      "step": 370
    },
    {
      "epoch": 0.6270903010033445,
      "grad_norm": 0.7446233034133911,
      "learning_rate": 0.00011019097844272025,
      "loss": 0.7786,
      "step": 375
    },
    {
      "epoch": 0.6354515050167224,
      "grad_norm": 0.754374086856842,
      "learning_rate": 0.00010598605092700247,
      "loss": 0.792,
      "step": 380
    },
    {
      "epoch": 0.6438127090301003,
      "grad_norm": 1.5724862813949585,
      "learning_rate": 0.00010181864098007187,
      "loss": 0.7877,
      "step": 385
    },
    {
      "epoch": 0.6521739130434783,
      "grad_norm": 0.7619232535362244,
      "learning_rate": 9.76923009096091e-05,
      "loss": 0.7795,
      "step": 390
    },
    {
      "epoch": 0.6605351170568562,
      "grad_norm": 0.7642078995704651,
      "learning_rate": 9.361054801525882e-05,
      "loss": 0.7811,
      "step": 395
    },
    {
      "epoch": 0.6688963210702341,
      "grad_norm": 0.6380756497383118,
      "learning_rate": 8.9576861590477e-05,
      "loss": 0.7793,
      "step": 400
    },
    {
      "epoch": 0.677257525083612,
      "grad_norm": 1.0837666988372803,
      "learning_rate": 8.559467995677535e-05,
      "loss": 0.7833,
      "step": 405
    },
    {
      "epoch": 0.68561872909699,
      "grad_norm": 0.4329131245613098,
      "learning_rate": 8.166739753288954e-05,
      "loss": 0.7889,
      "step": 410
    },
    {
      "epoch": 0.6939799331103679,
      "grad_norm": 0.6933662295341492,
      "learning_rate": 7.77983619413708e-05,
      "loss": 0.7903,
      "step": 415
    },
    {
      "epoch": 0.7023411371237458,
      "grad_norm": 0.7759296298027039,
      "learning_rate": 7.399087115506633e-05,
      "loss": 0.7835,
      "step": 420
    },
    {
      "epoch": 0.7107023411371237,
      "grad_norm": 0.7649604082107544,
      "learning_rate": 7.024817068592166e-05,
      "loss": 0.7865,
      "step": 425
    },
    {
      "epoch": 0.7190635451505016,
      "grad_norm": 0.4624633193016052,
      "learning_rate": 6.657345081850015e-05,
      "loss": 0.788,
      "step": 430
    },
    {
      "epoch": 0.7274247491638796,
      "grad_norm": 1.3198479413986206,
      "learning_rate": 6.296984389057886e-05,
      "loss": 0.7813,
      "step": 435
    },
    {
      "epoch": 0.7357859531772575,
      "grad_norm": 0.5763963460922241,
      "learning_rate": 5.944042162313818e-05,
      "loss": 0.7887,
      "step": 440
    },
    {
      "epoch": 0.7441471571906354,
      "grad_norm": 0.595403254032135,
      "learning_rate": 5.59881925020216e-05,
      "loss": 0.7767,
      "step": 445
    },
    {
      "epoch": 0.7525083612040134,
      "grad_norm": 0.9068358540534973,
      "learning_rate": 5.2616099213496704e-05,
      "loss": 0.7774,
      "step": 450
    },
    {
      "epoch": 0.7608695652173914,
      "grad_norm": 0.6962704658508301,
      "learning_rate": 4.93270161359043e-05,
      "loss": 0.7844,
      "step": 455
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.6204519271850586,
      "learning_rate": 4.612374688953315e-05,
      "loss": 0.7835,
      "step": 460
    },
    {
      "epoch": 0.7775919732441472,
      "grad_norm": 0.4705691635608673,
      "learning_rate": 4.300902194680902e-05,
      "loss": 0.7796,
      "step": 465
    },
    {
      "epoch": 0.7859531772575251,
      "grad_norm": 0.7778177857398987,
      "learning_rate": 3.998549630483525e-05,
      "loss": 0.7864,
      "step": 470
    },
    {
      "epoch": 0.794314381270903,
      "grad_norm": 0.7931321859359741,
      "learning_rate": 3.7055747222268256e-05,
      "loss": 0.7733,
      "step": 475
    },
    {
      "epoch": 0.802675585284281,
      "grad_norm": 0.5870620012283325,
      "learning_rate": 3.422227202245778e-05,
      "loss": 0.7615,
      "step": 480
    },
    {
      "epoch": 0.8110367892976589,
      "grad_norm": 0.9947313666343689,
      "learning_rate": 3.1487485964723966e-05,
      "loss": 0.7693,
      "step": 485
    },
    {
      "epoch": 0.8193979933110368,
      "grad_norm": 0.5786251425743103,
      "learning_rate": 2.885372018558604e-05,
      "loss": 0.7859,
      "step": 490
    },
    {
      "epoch": 0.8277591973244147,
      "grad_norm": 0.5991637110710144,
      "learning_rate": 2.6323219711697368e-05,
      "loss": 0.7781,
      "step": 495
    },
    {
      "epoch": 0.8361204013377926,
      "grad_norm": 0.7340580821037292,
      "learning_rate": 2.3898141546180678e-05,
      "loss": 0.7689,
      "step": 500
    },
    {
      "epoch": 0.8444816053511706,
      "grad_norm": 0.4942578971385956,
      "learning_rate": 2.1580552829994785e-05,
      "loss": 0.7829,
      "step": 505
    },
    {
      "epoch": 0.8528428093645485,
      "grad_norm": 0.4644772708415985,
      "learning_rate": 1.9372429079900052e-05,
      "loss": 0.7672,
      "step": 510
    },
    {
      "epoch": 0.8612040133779264,
      "grad_norm": 0.36809882521629333,
      "learning_rate": 1.727565250452414e-05,
      "loss": 0.754,
      "step": 515
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 0.4026561379432678,
      "learning_rate": 1.52920103999642e-05,
      "loss": 0.7628,
      "step": 520
    },
    {
      "epoch": 0.8779264214046822,
      "grad_norm": 0.45631930232048035,
      "learning_rate": 1.3423193626292405e-05,
      "loss": 0.7838,
      "step": 525
    },
    {
      "epoch": 0.8862876254180602,
      "grad_norm": 0.4896789789199829,
      "learning_rate": 1.1670795166263846e-05,
      "loss": 0.7797,
      "step": 530
    },
    {
      "epoch": 0.8946488294314381,
      "grad_norm": 0.4223991334438324,
      "learning_rate": 1.003630876745522e-05,
      "loss": 0.7747,
      "step": 535
    },
    {
      "epoch": 0.903010033444816,
      "grad_norm": 0.4364998936653137,
      "learning_rate": 8.521127668991612e-06,
      "loss": 0.7628,
      "step": 540
    },
    {
      "epoch": 0.9113712374581939,
      "grad_norm": 0.4812104403972626,
      "learning_rate": 7.126543413947045e-06,
      "loss": 0.7615,
      "step": 545
    },
    {
      "epoch": 0.919732441471572,
      "grad_norm": 0.4221721291542053,
      "learning_rate": 5.853744748430883e-06,
      "loss": 0.7692,
      "step": 550
    },
    {
      "epoch": 0.9280936454849499,
      "grad_norm": 0.42621150612831116,
      "learning_rate": 4.703816608298533e-06,
      "loss": 0.7862,
      "step": 555
    },
    {
      "epoch": 0.9364548494983278,
      "grad_norm": 0.3731984794139862,
      "learning_rate": 3.6777391943500877e-06,
      "loss": 0.7716,
      "step": 560
    },
    {
      "epoch": 0.9448160535117057,
      "grad_norm": 0.409588485956192,
      "learning_rate": 2.7763871368055137e-06,
      "loss": 0.7705,
      "step": 565
    },
    {
      "epoch": 0.9531772575250836,
      "grad_norm": 0.45919135212898254,
      "learning_rate": 2.000528749768199e-06,
      "loss": 0.7648,
      "step": 570
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.4654495120048523,
      "learning_rate": 1.3508253763127974e-06,
      "loss": 0.7715,
      "step": 575
    },
    {
      "epoch": 0.9698996655518395,
      "grad_norm": 0.37038466334342957,
      "learning_rate": 8.278308247549992e-07,
      "loss": 0.7731,
      "step": 580
    },
    {
      "epoch": 0.9782608695652174,
      "grad_norm": 0.40466973185539246,
      "learning_rate": 4.3199089658449425e-07,
      "loss": 0.7629,
      "step": 585
    },
    {
      "epoch": 0.9866220735785953,
      "grad_norm": 0.4049622118473053,
      "learning_rate": 1.6364300646291018e-07,
      "loss": 0.7578,
      "step": 590
    },
    {
      "epoch": 0.9949832775919732,
      "grad_norm": 0.37162256240844727,
      "learning_rate": 2.30158946110115e-08,
      "loss": 0.7674,
      "step": 595
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.468013048171997,
      "eval_runtime": 1.3394,
      "eval_samples_per_second": 3.733,
      "eval_steps_per_second": 0.747,
      "step": 598
    },
    {
      "epoch": 1.0,
      "step": 598,
      "total_flos": 9.117045735743816e+17,
      "train_loss": 1.9824217689475885,
      "train_runtime": 7933.7629,
      "train_samples_per_second": 2.411,
      "train_steps_per_second": 0.075
    }
  ],
  "logging_steps": 5,
  "max_steps": 598,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.117045735743816e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}