{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 157480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 4.97140678612276e-05,
      "loss": 2.3365,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.939636548481383e-05,
      "loss": 1.9007,
      "step": 2000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.907866310840005e-05,
      "loss": 1.8121,
      "step": 3000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.8760960731986276e-05,
      "loss": 1.7655,
      "step": 4000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.84432583555725e-05,
      "loss": 1.7269,
      "step": 5000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8125555979158726e-05,
      "loss": 1.6977,
      "step": 6000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.780785360274495e-05,
      "loss": 1.678,
      "step": 7000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.7490151226331175e-05,
      "loss": 1.6612,
      "step": 8000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.71724488499174e-05,
      "loss": 1.6365,
      "step": 9000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.6854746473503625e-05,
      "loss": 1.6223,
      "step": 10000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.653704409708985e-05,
      "loss": 1.6127,
      "step": 11000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.6219341720676074e-05,
      "loss": 1.5877,
      "step": 12000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.59016393442623e-05,
      "loss": 1.5809,
      "step": 13000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.5583936967848524e-05,
      "loss": 1.5737,
      "step": 14000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.526623459143475e-05,
      "loss": 1.5611,
      "step": 15000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.494853221502097e-05,
      "loss": 1.5297,
      "step": 16000
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.46308298386072e-05,
      "loss": 1.4783,
      "step": 17000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.431312746219342e-05,
      "loss": 1.472,
      "step": 18000
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.399542508577965e-05,
      "loss": 1.4821,
      "step": 19000
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.367772270936587e-05,
      "loss": 1.4681,
      "step": 20000
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.33600203329521e-05,
      "loss": 1.4564,
      "step": 21000
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.304231795653832e-05,
      "loss": 1.4509,
      "step": 22000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.2724615580124546e-05,
      "loss": 1.4478,
      "step": 23000
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.2406913203710764e-05,
      "loss": 1.4446,
      "step": 24000
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.208921082729699e-05,
      "loss": 1.439,
      "step": 25000
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.1771508450883214e-05,
      "loss": 1.4367,
      "step": 26000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.145380607446944e-05,
      "loss": 1.4181,
      "step": 27000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.113610369805566e-05,
      "loss": 1.417,
      "step": 28000
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.081840132164189e-05,
      "loss": 1.4056,
      "step": 29000
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.050069894522811e-05,
      "loss": 1.4006,
      "step": 30000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.018299656881434e-05,
      "loss": 1.4053,
      "step": 31000
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.986529419240056e-05,
      "loss": 1.3491,
      "step": 32000
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.954759181598679e-05,
      "loss": 1.3028,
      "step": 33000
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.922988943957301e-05,
      "loss": 1.3012,
      "step": 34000
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.8912187063159236e-05,
      "loss": 1.2896,
      "step": 35000
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.859448468674546e-05,
      "loss": 1.2955,
      "step": 36000
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.8276782310331686e-05,
      "loss": 1.2843,
      "step": 37000
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.7959079933917904e-05,
      "loss": 1.2876,
      "step": 38000
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.764137755750413e-05,
      "loss": 1.2826,
      "step": 39000
    },
    {
      "epoch": 2.54,
      "learning_rate": 3.732367518109035e-05,
      "loss": 1.2706,
      "step": 40000
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.700597280467658e-05,
      "loss": 1.2743,
      "step": 41000
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.66882704282628e-05,
      "loss": 1.2783,
      "step": 42000
    },
    {
      "epoch": 2.73,
      "learning_rate": 3.637056805184903e-05,
      "loss": 1.264,
      "step": 43000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.605286567543525e-05,
      "loss": 1.2614,
      "step": 44000
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.573516329902148e-05,
      "loss": 1.2549,
      "step": 45000
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.54174609226077e-05,
      "loss": 1.2453,
      "step": 46000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5099758546193926e-05,
      "loss": 1.2425,
      "step": 47000
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.478205616978015e-05,
      "loss": 1.1538,
      "step": 48000
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.4464353793366376e-05,
      "loss": 1.1307,
      "step": 49000
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.41466514169526e-05,
      "loss": 1.1375,
      "step": 50000
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.3828949040538825e-05,
      "loss": 1.1258,
      "step": 51000
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.351124666412505e-05,
      "loss": 1.1237,
      "step": 52000
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.3193544287711275e-05,
      "loss": 1.1226,
      "step": 53000
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.28758419112975e-05,
      "loss": 1.1246,
      "step": 54000
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.2558139534883724e-05,
      "loss": 1.1197,
      "step": 55000
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.224043715846995e-05,
      "loss": 1.117,
      "step": 56000
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.1922734782056174e-05,
      "loss": 1.1206,
      "step": 57000
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.160503240564239e-05,
      "loss": 1.1023,
      "step": 58000
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.1287330029228617e-05,
      "loss": 1.1076,
      "step": 59000
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.096962765281484e-05,
      "loss": 1.1061,
      "step": 60000
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.0651925276401066e-05,
      "loss": 1.1017,
      "step": 61000
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.033422289998729e-05,
      "loss": 1.1005,
      "step": 62000
    },
    {
      "epoch": 4.0,
      "learning_rate": 3.0016520523573516e-05,
      "loss": 1.0891,
      "step": 63000
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.969881814715974e-05,
      "loss": 0.9693,
      "step": 64000
    },
    {
      "epoch": 4.13,
      "learning_rate": 2.9381115770745965e-05,
      "loss": 0.9703,
      "step": 65000
    },
    {
      "epoch": 4.19,
      "learning_rate": 2.906341339433219e-05,
      "loss": 0.9693,
      "step": 66000
    },
    {
      "epoch": 4.25,
      "learning_rate": 2.8745711017918414e-05,
      "loss": 0.9698,
      "step": 67000
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.842800864150464e-05,
      "loss": 0.9715,
      "step": 68000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.8110306265090864e-05,
      "loss": 0.9734,
      "step": 69000
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.779260388867709e-05,
      "loss": 0.9796,
      "step": 70000
    },
    {
      "epoch": 4.51,
      "learning_rate": 2.7474901512263313e-05,
      "loss": 0.9729,
      "step": 71000
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7157199135849538e-05,
      "loss": 0.9727,
      "step": 72000
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.6839496759435763e-05,
      "loss": 0.9721,
      "step": 73000
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.6521794383021988e-05,
      "loss": 0.9643,
      "step": 74000
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.6204092006608212e-05,
      "loss": 0.9599,
      "step": 75000
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.5886389630194434e-05,
      "loss": 0.9651,
      "step": 76000
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.556868725378066e-05,
      "loss": 0.9587,
      "step": 77000
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.5250984877366883e-05,
      "loss": 0.9568,
      "step": 78000
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.4933282500953108e-05,
      "loss": 0.9187,
      "step": 79000
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.4615580124539333e-05,
      "loss": 0.8357,
      "step": 80000
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.4297877748125557e-05,
      "loss": 0.8324,
      "step": 81000
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.3980175371711782e-05,
      "loss": 0.84,
      "step": 82000
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.3662472995298007e-05,
      "loss": 0.8392,
      "step": 83000
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.3344770618884228e-05,
      "loss": 0.8439,
      "step": 84000
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.3027068242470453e-05,
      "loss": 0.8375,
      "step": 85000
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.2709365866056678e-05,
      "loss": 0.8363,
      "step": 86000
    },
    {
      "epoch": 5.52,
      "learning_rate": 2.2391663489642903e-05,
      "loss": 0.8387,
      "step": 87000
    },
    {
      "epoch": 5.59,
      "learning_rate": 2.2073961113229127e-05,
      "loss": 0.8458,
      "step": 88000
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.1756258736815352e-05,
      "loss": 0.84,
      "step": 89000
    },
    {
      "epoch": 5.72,
      "learning_rate": 2.1438556360401577e-05,
      "loss": 0.833,
      "step": 90000
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.11208539839878e-05,
      "loss": 0.8356,
      "step": 91000
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.0803151607574026e-05,
      "loss": 0.8367,
      "step": 92000
    },
    {
      "epoch": 5.91,
      "learning_rate": 2.048544923116025e-05,
      "loss": 0.8323,
      "step": 93000
    },
    {
      "epoch": 5.97,
      "learning_rate": 2.0167746854746476e-05,
      "loss": 0.8304,
      "step": 94000
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.98500444783327e-05,
      "loss": 0.7779,
      "step": 95000
    },
    {
      "epoch": 6.1,
      "learning_rate": 1.9532342101918925e-05,
      "loss": 0.7192,
      "step": 96000
    },
    {
      "epoch": 6.16,
      "learning_rate": 1.921463972550515e-05,
      "loss": 0.7189,
      "step": 97000
    },
    {
      "epoch": 6.22,
      "learning_rate": 1.8896937349091375e-05,
      "loss": 0.7193,
      "step": 98000
    },
    {
      "epoch": 6.29,
      "learning_rate": 1.85792349726776e-05,
      "loss": 0.7281,
      "step": 99000
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.826153259626382e-05,
      "loss": 0.7303,
      "step": 100000
    },
    {
      "epoch": 6.41,
      "learning_rate": 1.7943830219850045e-05,
      "loss": 0.7308,
      "step": 101000
    },
    {
      "epoch": 6.48,
      "learning_rate": 1.762612784343627e-05,
      "loss": 0.7285,
      "step": 102000
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.730842546702249e-05,
      "loss": 0.7279,
      "step": 103000
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.6990723090608716e-05,
      "loss": 0.7323,
      "step": 104000
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.667302071419494e-05,
      "loss": 0.7309,
      "step": 105000
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.6355318337781166e-05,
      "loss": 0.729,
      "step": 106000
    },
    {
      "epoch": 6.79,
      "learning_rate": 1.603761596136739e-05,
      "loss": 0.7285,
      "step": 107000
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.5719913584953615e-05,
      "loss": 0.7269,
      "step": 108000
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.540221120853984e-05,
      "loss": 0.7236,
      "step": 109000
    },
    {
      "epoch": 6.99,
      "learning_rate": 1.5084508832126065e-05,
      "loss": 0.7238,
      "step": 110000
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.476680645571229e-05,
      "loss": 0.6498,
      "step": 111000
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.4449104079298514e-05,
      "loss": 0.6289,
      "step": 112000
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.4131401702884739e-05,
      "loss": 0.629,
      "step": 113000
    },
    {
      "epoch": 7.24,
      "learning_rate": 1.3813699326470964e-05,
      "loss": 0.6331,
      "step": 114000
    },
    {
      "epoch": 7.3,
      "learning_rate": 1.3495996950057188e-05,
      "loss": 0.6354,
      "step": 115000
    },
    {
      "epoch": 7.37,
      "learning_rate": 1.3178294573643413e-05,
      "loss": 0.634,
      "step": 116000
    },
    {
      "epoch": 7.43,
      "learning_rate": 1.2860592197229638e-05,
      "loss": 0.636,
      "step": 117000
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.254288982081586e-05,
      "loss": 0.6422,
      "step": 118000
    },
    {
      "epoch": 7.56,
      "learning_rate": 1.2225187444402086e-05,
      "loss": 0.6371,
      "step": 119000
    },
    {
      "epoch": 7.62,
      "learning_rate": 1.1907485067988309e-05,
      "loss": 0.6383,
      "step": 120000
    },
    {
      "epoch": 7.68,
      "learning_rate": 1.1589782691574534e-05,
      "loss": 0.6401,
      "step": 121000
    },
    {
      "epoch": 7.75,
      "learning_rate": 1.1272080315160757e-05,
      "loss": 0.6332,
      "step": 122000
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.0954377938746981e-05,
      "loss": 0.6408,
      "step": 123000
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.0636675562333206e-05,
      "loss": 0.6337,
      "step": 124000
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.031897318591943e-05,
      "loss": 0.6406,
      "step": 125000
    },
    {
      "epoch": 8.0,
      "learning_rate": 1.0001270809505656e-05,
      "loss": 0.6372,
      "step": 126000
    },
    {
      "epoch": 8.06,
      "learning_rate": 9.68356843309188e-06,
      "loss": 0.5542,
      "step": 127000
    },
    {
      "epoch": 8.13,
      "learning_rate": 9.365866056678105e-06,
      "loss": 0.5541,
      "step": 128000
    },
    {
      "epoch": 8.19,
      "learning_rate": 9.04816368026433e-06,
      "loss": 0.565,
      "step": 129000
    },
    {
      "epoch": 8.26,
      "learning_rate": 8.730461303850553e-06,
      "loss": 0.5644,
      "step": 130000
    },
    {
      "epoch": 8.32,
      "learning_rate": 8.412758927436778e-06,
      "loss": 0.5651,
      "step": 131000
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.095056551023002e-06,
      "loss": 0.5661,
      "step": 132000
    },
    {
      "epoch": 8.45,
      "learning_rate": 7.777354174609227e-06,
      "loss": 0.5704,
      "step": 133000
    },
    {
      "epoch": 8.51,
      "learning_rate": 7.459651798195451e-06,
      "loss": 0.5669,
      "step": 134000
    },
    {
      "epoch": 8.57,
      "learning_rate": 7.141949421781676e-06,
      "loss": 0.5694,
      "step": 135000
    },
    {
      "epoch": 8.64,
      "learning_rate": 6.8242470453679e-06,
      "loss": 0.5705,
      "step": 136000
    },
    {
      "epoch": 8.7,
      "learning_rate": 6.506544668954124e-06,
      "loss": 0.5682,
      "step": 137000
    },
    {
      "epoch": 8.76,
      "learning_rate": 6.188842292540348e-06,
      "loss": 0.5676,
      "step": 138000
    },
    {
      "epoch": 8.83,
      "learning_rate": 5.871139916126573e-06,
      "loss": 0.5674,
      "step": 139000
    },
    {
      "epoch": 8.89,
      "learning_rate": 5.553437539712798e-06,
      "loss": 0.5681,
      "step": 140000
    },
    {
      "epoch": 8.95,
      "learning_rate": 5.2357351632990216e-06,
      "loss": 0.5636,
      "step": 141000
    },
    {
      "epoch": 9.02,
      "learning_rate": 4.918032786885246e-06,
      "loss": 0.5495,
      "step": 142000
    },
    {
      "epoch": 9.08,
      "learning_rate": 4.60033041047147e-06,
      "loss": 0.5124,
      "step": 143000
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.282628034057695e-06,
      "loss": 0.514,
      "step": 144000
    },
    {
      "epoch": 9.21,
      "learning_rate": 3.964925657643919e-06,
      "loss": 0.517,
      "step": 145000
    },
    {
      "epoch": 9.27,
      "learning_rate": 3.6472232812301436e-06,
      "loss": 0.5173,
      "step": 146000
    },
    {
      "epoch": 9.33,
      "learning_rate": 3.3295209048163683e-06,
      "loss": 0.5164,
      "step": 147000
    },
    {
      "epoch": 9.4,
      "learning_rate": 3.0118185284025926e-06,
      "loss": 0.5202,
      "step": 148000
    },
    {
      "epoch": 9.46,
      "learning_rate": 2.694116151988817e-06,
      "loss": 0.5183,
      "step": 149000
    },
    {
      "epoch": 9.53,
      "learning_rate": 2.3764137755750413e-06,
      "loss": 0.517,
      "step": 150000
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.058711399161266e-06,
      "loss": 0.5131,
      "step": 151000
    },
    {
      "epoch": 9.65,
      "learning_rate": 1.7410090227474903e-06,
      "loss": 0.516,
      "step": 152000
    },
    {
      "epoch": 9.72,
      "learning_rate": 1.4233066463337146e-06,
      "loss": 0.5135,
      "step": 153000
    },
    {
      "epoch": 9.78,
      "learning_rate": 1.105604269919939e-06,
      "loss": 0.5121,
      "step": 154000
    },
    {
      "epoch": 9.84,
      "learning_rate": 7.879018935061634e-07,
      "loss": 0.514,
      "step": 155000
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.701995170923879e-07,
      "loss": 0.5134,
      "step": 156000
    },
    {
      "epoch": 9.97,
      "learning_rate": 1.5249714067861227e-07,
      "loss": 0.5109,
      "step": 157000
    }
  ],
  "max_steps": 157480,
  "num_train_epochs": 10,
  "total_flos": 1.68490001031168e+16,
  "trial_name": null,
  "trial_params": null
}