|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 260853, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.990416058086355e-05, |
|
"loss": 2.1636, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9808321161727104e-05, |
|
"loss": 1.994, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.971248174259066e-05, |
|
"loss": 1.907, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.961664232345421e-05, |
|
"loss": 1.8301, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.952080290431776e-05, |
|
"loss": 1.7698, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.942496348518131e-05, |
|
"loss": 1.7158, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.932912406604486e-05, |
|
"loss": 1.6953, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9233284646908415e-05, |
|
"loss": 1.6492, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.913744522777197e-05, |
|
"loss": 1.6356, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9041605808635516e-05, |
|
"loss": 1.6396, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.894576638949907e-05, |
|
"loss": 1.6158, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.884992697036262e-05, |
|
"loss": 1.5995, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.875408755122617e-05, |
|
"loss": 1.5564, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8658248132089725e-05, |
|
"loss": 1.5438, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.856240871295327e-05, |
|
"loss": 1.5618, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.846656929381683e-05, |
|
"loss": 1.5249, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8370729874680374e-05, |
|
"loss": 1.5021, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.827489045554393e-05, |
|
"loss": 1.4965, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.817905103640748e-05, |
|
"loss": 1.482, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8083211617271036e-05, |
|
"loss": 1.478, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.798737219813458e-05, |
|
"loss": 1.4683, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.789153277899813e-05, |
|
"loss": 1.4413, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7795693359861685e-05, |
|
"loss": 1.4603, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.769985394072524e-05, |
|
"loss": 1.4551, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.760401452158879e-05, |
|
"loss": 1.443, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.750817510245234e-05, |
|
"loss": 1.4114, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7412335683315894e-05, |
|
"loss": 1.4287, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.731649626417944e-05, |
|
"loss": 1.4237, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7220656845042995e-05, |
|
"loss": 1.4067, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.712481742590655e-05, |
|
"loss": 1.4095, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.70289780067701e-05, |
|
"loss": 1.3931, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.693313858763365e-05, |
|
"loss": 1.3703, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.68372991684972e-05, |
|
"loss": 1.3877, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.674145974936075e-05, |
|
"loss": 1.3768, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6645620330224306e-05, |
|
"loss": 1.3745, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.6549780911087854e-05, |
|
"loss": 1.3683, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.645394149195141e-05, |
|
"loss": 1.3696, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6358102072814955e-05, |
|
"loss": 1.3424, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.626226265367851e-05, |
|
"loss": 1.3405, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.616642323454206e-05, |
|
"loss": 1.3524, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.607058381540562e-05, |
|
"loss": 1.3253, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.5974744396269164e-05, |
|
"loss": 1.323, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.587890497713272e-05, |
|
"loss": 1.3265, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5783065557996266e-05, |
|
"loss": 1.3285, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.568722613885982e-05, |
|
"loss": 1.3064, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5591386719723374e-05, |
|
"loss": 1.3208, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.549554730058692e-05, |
|
"loss": 1.3224, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5399707881450475e-05, |
|
"loss": 1.3329, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.530386846231402e-05, |
|
"loss": 1.318, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.520802904317758e-05, |
|
"loss": 1.3029, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.511218962404113e-05, |
|
"loss": 1.3139, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.501635020490468e-05, |
|
"loss": 1.3033, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.492051078576823e-05, |
|
"loss": 1.3046, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.482467136663178e-05, |
|
"loss": 1.2975, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.472883194749533e-05, |
|
"loss": 1.2869, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.463299252835889e-05, |
|
"loss": 1.2797, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.453715310922244e-05, |
|
"loss": 1.2803, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.444131369008599e-05, |
|
"loss": 1.2613, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4345474270949536e-05, |
|
"loss": 1.2781, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.424963485181309e-05, |
|
"loss": 1.2875, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.4153795432676644e-05, |
|
"loss": 1.2775, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.40579560135402e-05, |
|
"loss": 1.272, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.3962116594403745e-05, |
|
"loss": 1.265, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.38662771752673e-05, |
|
"loss": 1.2522, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.377043775613085e-05, |
|
"loss": 1.262, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.36745983369944e-05, |
|
"loss": 1.2353, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3578758917857955e-05, |
|
"loss": 1.2425, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.34829194987215e-05, |
|
"loss": 1.2536, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.3387080079585056e-05, |
|
"loss": 1.2463, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.3291240660448603e-05, |
|
"loss": 1.2224, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.3195401241312164e-05, |
|
"loss": 1.2313, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.309956182217571e-05, |
|
"loss": 1.2473, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.300372240303926e-05, |
|
"loss": 1.2193, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.290788298390281e-05, |
|
"loss": 1.2234, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.281204356476636e-05, |
|
"loss": 1.2365, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.271620414562992e-05, |
|
"loss": 1.2107, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.262036472649347e-05, |
|
"loss": 1.219, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.252452530735702e-05, |
|
"loss": 1.232, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.242868588822057e-05, |
|
"loss": 1.2158, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.233284646908412e-05, |
|
"loss": 1.2113, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.223700704994768e-05, |
|
"loss": 1.2027, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2141167630811225e-05, |
|
"loss": 1.2351, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.204532821167478e-05, |
|
"loss": 1.2031, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.1949488792538326e-05, |
|
"loss": 1.2189, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.185364937340188e-05, |
|
"loss": 1.2178, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.1757809954265434e-05, |
|
"loss": 1.2127, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.166197053512898e-05, |
|
"loss": 1.2245, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1566131115992536e-05, |
|
"loss": 1.2316, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.147029169685608e-05, |
|
"loss": 1.2077, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.137445227771964e-05, |
|
"loss": 1.1855, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.1278612858583184e-05, |
|
"loss": 1.2007, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.1182773439446745e-05, |
|
"loss": 1.2152, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.108693402031029e-05, |
|
"loss": 1.2201, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.099109460117384e-05, |
|
"loss": 1.2002, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.0895255182037394e-05, |
|
"loss": 1.2046, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.079941576290094e-05, |
|
"loss": 1.1818, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.07035763437645e-05, |
|
"loss": 1.1897, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.060773692462805e-05, |
|
"loss": 1.1969, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.05118975054916e-05, |
|
"loss": 1.1983, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.041605808635515e-05, |
|
"loss": 1.192, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.03202186672187e-05, |
|
"loss": 1.1977, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.022437924808226e-05, |
|
"loss": 1.1815, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0128539828945806e-05, |
|
"loss": 1.1385, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.003270040980936e-05, |
|
"loss": 1.1852, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.993686099067291e-05, |
|
"loss": 1.1639, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.984102157153646e-05, |
|
"loss": 1.1693, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9745182152400015e-05, |
|
"loss": 1.1489, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.964934273326356e-05, |
|
"loss": 1.1661, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.955350331412712e-05, |
|
"loss": 1.1732, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9457663894990664e-05, |
|
"loss": 1.1839, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.936182447585422e-05, |
|
"loss": 1.1767, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.926598505671777e-05, |
|
"loss": 1.17, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9170145637581326e-05, |
|
"loss": 1.1701, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9074306218444873e-05, |
|
"loss": 1.1392, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.897846679930843e-05, |
|
"loss": 1.1493, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8882627380171975e-05, |
|
"loss": 1.1413, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.878678796103553e-05, |
|
"loss": 1.1587, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.869094854189908e-05, |
|
"loss": 1.1577, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.859510912276263e-05, |
|
"loss": 1.1495, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8499269703626184e-05, |
|
"loss": 1.1534, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.840343028448973e-05, |
|
"loss": 1.165, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8307590865353286e-05, |
|
"loss": 1.1444, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.821175144621684e-05, |
|
"loss": 1.1464, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.811591202708039e-05, |
|
"loss": 1.1475, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.802007260794394e-05, |
|
"loss": 1.14, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.792423318880749e-05, |
|
"loss": 1.1265, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.782839376967104e-05, |
|
"loss": 1.1388, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7732554350534596e-05, |
|
"loss": 1.1373, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.763671493139815e-05, |
|
"loss": 1.1309, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.75408755122617e-05, |
|
"loss": 1.1304, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7445036093125245e-05, |
|
"loss": 1.1173, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.73491966739888e-05, |
|
"loss": 1.12, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.725335725485235e-05, |
|
"loss": 1.1116, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.715751783571591e-05, |
|
"loss": 1.149, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7061678416579454e-05, |
|
"loss": 1.1255, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.696583899744301e-05, |
|
"loss": 1.1429, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6869999578306556e-05, |
|
"loss": 1.1217, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.677416015917011e-05, |
|
"loss": 1.1174, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6678320740033664e-05, |
|
"loss": 1.113, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.658248132089721e-05, |
|
"loss": 1.1208, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6486641901760765e-05, |
|
"loss": 1.1174, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.639080248262431e-05, |
|
"loss": 1.1201, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6294963063487867e-05, |
|
"loss": 1.1266, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.619912364435142e-05, |
|
"loss": 1.1201, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.610328422521497e-05, |
|
"loss": 1.1003, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.600744480607852e-05, |
|
"loss": 1.1004, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.591160538694207e-05, |
|
"loss": 1.1038, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.581576596780562e-05, |
|
"loss": 1.1264, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.571992654866918e-05, |
|
"loss": 1.0977, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.562408712953273e-05, |
|
"loss": 1.1079, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.552824771039628e-05, |
|
"loss": 1.1059, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5432408291259826e-05, |
|
"loss": 1.1193, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.533656887212338e-05, |
|
"loss": 1.1115, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5240729452986934e-05, |
|
"loss": 1.1191, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.514489003385049e-05, |
|
"loss": 1.1028, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.5049050614714035e-05, |
|
"loss": 1.0868, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.495321119557759e-05, |
|
"loss": 1.1205, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.485737177644114e-05, |
|
"loss": 1.0953, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.476153235730469e-05, |
|
"loss": 1.0871, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4665692938168245e-05, |
|
"loss": 1.0888, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.456985351903179e-05, |
|
"loss": 1.0983, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4474014099895346e-05, |
|
"loss": 1.1028, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4378174680758893e-05, |
|
"loss": 1.0996, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.428233526162245e-05, |
|
"loss": 1.0894, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.4186495842486e-05, |
|
"loss": 1.099, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.409065642334955e-05, |
|
"loss": 1.0693, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.39948170042131e-05, |
|
"loss": 1.1003, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.389897758507665e-05, |
|
"loss": 1.0709, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3803138165940204e-05, |
|
"loss": 1.098, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.370729874680376e-05, |
|
"loss": 1.0889, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.361145932766731e-05, |
|
"loss": 1.1002, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.351561990853086e-05, |
|
"loss": 1.0914, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.341978048939441e-05, |
|
"loss": 1.0916, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.332394107025796e-05, |
|
"loss": 1.0824, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3228101651121515e-05, |
|
"loss": 0.9678, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.313226223198507e-05, |
|
"loss": 0.9703, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3036422812848616e-05, |
|
"loss": 0.9833, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.294058339371217e-05, |
|
"loss": 0.9731, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.284474397457572e-05, |
|
"loss": 0.9738, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.274890455543927e-05, |
|
"loss": 0.9739, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2653065136302826e-05, |
|
"loss": 0.9543, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.255722571716637e-05, |
|
"loss": 0.9771, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.246138629802993e-05, |
|
"loss": 0.9846, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2365546878893474e-05, |
|
"loss": 0.9806, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.226970745975703e-05, |
|
"loss": 0.9792, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.217386804062058e-05, |
|
"loss": 0.9908, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.207802862148413e-05, |
|
"loss": 0.9726, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.1982189202347684e-05, |
|
"loss": 0.9701, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.188634978321123e-05, |
|
"loss": 0.9697, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.1790510364074785e-05, |
|
"loss": 0.9535, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.169467094493834e-05, |
|
"loss": 0.9686, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.159883152580189e-05, |
|
"loss": 0.9778, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.150299210666544e-05, |
|
"loss": 0.9699, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.1407152687528995e-05, |
|
"loss": 0.9701, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.131131326839254e-05, |
|
"loss": 0.9829, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.1215473849256096e-05, |
|
"loss": 0.9739, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.111963443011965e-05, |
|
"loss": 0.9735, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.10237950109832e-05, |
|
"loss": 0.9637, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.092795559184675e-05, |
|
"loss": 0.9675, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.08321161727103e-05, |
|
"loss": 0.9702, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.073627675357385e-05, |
|
"loss": 0.9812, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.064043733443741e-05, |
|
"loss": 0.9677, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0544597915300954e-05, |
|
"loss": 0.9586, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0448758496164508e-05, |
|
"loss": 0.9602, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.035291907702806e-05, |
|
"loss": 0.96, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0257079657891613e-05, |
|
"loss": 0.9658, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.016124023875516e-05, |
|
"loss": 0.9756, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.0065400819618718e-05, |
|
"loss": 0.986, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9969561400482265e-05, |
|
"loss": 0.9792, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9873721981345816e-05, |
|
"loss": 0.9735, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.977788256220937e-05, |
|
"loss": 0.9672, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9682043143072917e-05, |
|
"loss": 0.9558, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9586203723936474e-05, |
|
"loss": 0.9605, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.949036430480002e-05, |
|
"loss": 0.9929, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9394524885663576e-05, |
|
"loss": 0.9562, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9298685466527126e-05, |
|
"loss": 0.9618, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9202846047390674e-05, |
|
"loss": 0.9644, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.910700662825423e-05, |
|
"loss": 0.9558, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.901116720911778e-05, |
|
"loss": 0.9582, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8915327789981332e-05, |
|
"loss": 0.9661, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8819488370844883e-05, |
|
"loss": 0.9621, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8723648951708437e-05, |
|
"loss": 0.9647, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8627809532571988e-05, |
|
"loss": 0.9392, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.8531970113435535e-05, |
|
"loss": 0.9643, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.843613069429909e-05, |
|
"loss": 0.9783, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.834029127516264e-05, |
|
"loss": 0.9703, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8244451856026194e-05, |
|
"loss": 0.9647, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8148612436889745e-05, |
|
"loss": 0.9492, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.80527730177533e-05, |
|
"loss": 0.9603, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7956933598616846e-05, |
|
"loss": 0.9602, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7861094179480397e-05, |
|
"loss": 0.9663, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.776525476034395e-05, |
|
"loss": 0.9556, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.76694153412075e-05, |
|
"loss": 0.9564, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7573575922071055e-05, |
|
"loss": 0.9301, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7477736502934603e-05, |
|
"loss": 0.9284, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7381897083798157e-05, |
|
"loss": 0.9444, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7286057664661707e-05, |
|
"loss": 0.9767, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7190218245525258e-05, |
|
"loss": 0.9633, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7094378826388812e-05, |
|
"loss": 0.9468, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.699853940725236e-05, |
|
"loss": 0.9455, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6902699988115913e-05, |
|
"loss": 0.9481, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6806860568979464e-05, |
|
"loss": 0.9511, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6711021149843018e-05, |
|
"loss": 0.9552, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.661518173070657e-05, |
|
"loss": 0.9626, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6519342311570116e-05, |
|
"loss": 0.9528, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.642350289243367e-05, |
|
"loss": 0.9672, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.632766347329722e-05, |
|
"loss": 0.9479, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6231824054160775e-05, |
|
"loss": 0.9369, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6135984635024326e-05, |
|
"loss": 0.9545, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.604014521588788e-05, |
|
"loss": 0.9515, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.5944305796751427e-05, |
|
"loss": 0.9348, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5848466377614978e-05, |
|
"loss": 0.9501, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.575262695847853e-05, |
|
"loss": 0.9564, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5656787539342082e-05, |
|
"loss": 0.9367, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5560948120205636e-05, |
|
"loss": 0.9495, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5465108701069184e-05, |
|
"loss": 0.9328, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5369269281932738e-05, |
|
"loss": 0.9475, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5273429862796288e-05, |
|
"loss": 0.9601, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.517759044365984e-05, |
|
"loss": 0.9438, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5081751024523393e-05, |
|
"loss": 0.9473, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4985911605386944e-05, |
|
"loss": 0.9677, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4890072186250494e-05, |
|
"loss": 0.9581, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4794232767114045e-05, |
|
"loss": 0.9359, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4698393347977596e-05, |
|
"loss": 0.9289, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.460255392884115e-05, |
|
"loss": 0.9361, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.45067145097047e-05, |
|
"loss": 0.9616, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.441087509056825e-05, |
|
"loss": 0.9308, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4315035671431805e-05, |
|
"loss": 0.9437, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4219196252295356e-05, |
|
"loss": 0.9443, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4123356833158906e-05, |
|
"loss": 0.9324, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4027517414022457e-05, |
|
"loss": 0.9373, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3931677994886008e-05, |
|
"loss": 0.9414, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3835838575749562e-05, |
|
"loss": 0.9466, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3739999156613113e-05, |
|
"loss": 0.9383, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3644159737476663e-05, |
|
"loss": 0.9412, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3548320318340217e-05, |
|
"loss": 0.9454, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3452480899203765e-05, |
|
"loss": 0.9285, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.335664148006732e-05, |
|
"loss": 0.9261, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.326080206093087e-05, |
|
"loss": 0.9404, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.316496264179442e-05, |
|
"loss": 0.9206, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.3069123222657974e-05, |
|
"loss": 0.9298, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2973283803521525e-05, |
|
"loss": 0.9271, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.287744438438508e-05, |
|
"loss": 0.9431, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.278160496524863e-05, |
|
"loss": 0.9238, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2685765546112177e-05, |
|
"loss": 0.9367, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.258992612697573e-05, |
|
"loss": 0.9312, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.249408670783928e-05, |
|
"loss": 0.9355, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2398247288702835e-05, |
|
"loss": 0.9381, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2302407869566386e-05, |
|
"loss": 0.934, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2206568450429937e-05, |
|
"loss": 0.9078, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.211072903129349e-05, |
|
"loss": 0.9244, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2014889612157038e-05, |
|
"loss": 0.9438, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.191905019302059e-05, |
|
"loss": 0.9224, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.1823210773884143e-05, |
|
"loss": 0.9323, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1727371354747694e-05, |
|
"loss": 0.9333, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1631531935611248e-05, |
|
"loss": 0.9267, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1535692516474798e-05, |
|
"loss": 0.9096, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.143985309733835e-05, |
|
"loss": 0.938, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.13440136782019e-05, |
|
"loss": 0.9271, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.124817425906545e-05, |
|
"loss": 0.9221, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1152334839929004e-05, |
|
"loss": 0.9264, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.1056495420792555e-05, |
|
"loss": 0.9166, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0960656001656106e-05, |
|
"loss": 0.9392, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.086481658251966e-05, |
|
"loss": 0.9075, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.076897716338321e-05, |
|
"loss": 0.9103, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.067313774424676e-05, |
|
"loss": 0.9298, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0577298325110312e-05, |
|
"loss": 0.918, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0481458905973862e-05, |
|
"loss": 0.9088, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0385619486837416e-05, |
|
"loss": 0.9046, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0289780067700967e-05, |
|
"loss": 0.9116, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0193940648564518e-05, |
|
"loss": 0.9124, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0098101229428072e-05, |
|
"loss": 0.9167, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.000226181029162e-05, |
|
"loss": 0.9143, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9906422391155173e-05, |
|
"loss": 0.9047, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9810582972018724e-05, |
|
"loss": 0.9129, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9714743552882275e-05, |
|
"loss": 0.9019, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.961890413374583e-05, |
|
"loss": 0.9104, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.952306471460938e-05, |
|
"loss": 0.9021, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.942722529547293e-05, |
|
"loss": 0.9206, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.933138587633648e-05, |
|
"loss": 0.9211, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.923554645720003e-05, |
|
"loss": 0.9126, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9139707038063585e-05, |
|
"loss": 0.9167, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.9043867618927136e-05, |
|
"loss": 0.9076, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.8948028199790687e-05, |
|
"loss": 0.908, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.885218878065424e-05, |
|
"loss": 0.9222, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.875634936151779e-05, |
|
"loss": 0.9072, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8660509942381342e-05, |
|
"loss": 0.9079, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8564670523244893e-05, |
|
"loss": 0.9071, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8468831104108443e-05, |
|
"loss": 0.9093, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8372991684971997e-05, |
|
"loss": 0.8963, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8277152265835548e-05, |
|
"loss": 0.8875, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.81813128466991e-05, |
|
"loss": 0.8972, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8085473427562653e-05, |
|
"loss": 0.9027, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7989634008426203e-05, |
|
"loss": 0.8843, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7893794589289754e-05, |
|
"loss": 0.9142, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7797955170153305e-05, |
|
"loss": 0.9125, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7702115751016855e-05, |
|
"loss": 0.8859, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.760627633188041e-05, |
|
"loss": 0.9077, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.751043691274396e-05, |
|
"loss": 0.911, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.741459749360751e-05, |
|
"loss": 0.8953, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7318758074471065e-05, |
|
"loss": 0.8891, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7222918655334612e-05, |
|
"loss": 0.887, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7127079236198166e-05, |
|
"loss": 0.8849, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7031239817061717e-05, |
|
"loss": 0.8899, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6935400397925268e-05, |
|
"loss": 0.8997, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.683956097878882e-05, |
|
"loss": 0.8807, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6743721559652372e-05, |
|
"loss": 0.9092, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6647882140515923e-05, |
|
"loss": 0.8584, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6552042721379474e-05, |
|
"loss": 0.7926, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6456203302243024e-05, |
|
"loss": 0.8028, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.636036388310658e-05, |
|
"loss": 0.7806, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.626452446397013e-05, |
|
"loss": 0.7893, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.616868504483368e-05, |
|
"loss": 0.7922, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6072845625697234e-05, |
|
"loss": 0.782, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5977006206560784e-05, |
|
"loss": 0.782, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5881166787424335e-05, |
|
"loss": 0.7839, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5785327368287886e-05, |
|
"loss": 0.7829, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5689487949151436e-05, |
|
"loss": 0.784, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.559364853001499e-05, |
|
"loss": 0.7905, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.549780911087854e-05, |
|
"loss": 0.7764, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5401969691742095e-05, |
|
"loss": 0.8011, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5306130272605646e-05, |
|
"loss": 0.7833, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5210290853469197e-05, |
|
"loss": 0.7892, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5114451434332747e-05, |
|
"loss": 0.794, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5018612015196298e-05, |
|
"loss": 0.7772, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.492277259605985e-05, |
|
"loss": 0.7736, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4826933176923403e-05, |
|
"loss": 0.7847, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4731093757786953e-05, |
|
"loss": 0.793, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4635254338650506e-05, |
|
"loss": 0.7799, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4539414919514058e-05, |
|
"loss": 0.7755, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4443575500377607e-05, |
|
"loss": 0.7853, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.434773608124116e-05, |
|
"loss": 0.7751, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.425189666210471e-05, |
|
"loss": 0.7965, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4156057242968262e-05, |
|
"loss": 0.778, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4060217823831815e-05, |
|
"loss": 0.7715, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3964378404695367e-05, |
|
"loss": 0.7746, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3868538985558918e-05, |
|
"loss": 0.7787, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3772699566422467e-05, |
|
"loss": 0.7924, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3676860147286019e-05, |
|
"loss": 0.7732, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3581020728149571e-05, |
|
"loss": 0.7836, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3485181309013122e-05, |
|
"loss": 0.7698, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3389341889876675e-05, |
|
"loss": 0.782, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3293502470740227e-05, |
|
"loss": 0.7732, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.319766305160378e-05, |
|
"loss": 0.7871, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3101823632467328e-05, |
|
"loss": 0.7833, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3005984213330879e-05, |
|
"loss": 0.7968, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2910144794194431e-05, |
|
"loss": 0.7849, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2814305375057984e-05, |
|
"loss": 0.7978, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2718465955921536e-05, |
|
"loss": 0.7993, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2622626536785087e-05, |
|
"loss": 0.7859, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2526787117648639e-05, |
|
"loss": 0.7734, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.243094769851219e-05, |
|
"loss": 0.7858, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2335108279375742e-05, |
|
"loss": 0.7904, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2239268860239291e-05, |
|
"loss": 0.7882, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2143429441102843e-05, |
|
"loss": 0.7795, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2047590021966396e-05, |
|
"loss": 0.7596, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1951750602829948e-05, |
|
"loss": 0.7613, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1855911183693499e-05, |
|
"loss": 0.7775, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.176007176455705e-05, |
|
"loss": 0.7779, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1664232345420602e-05, |
|
"loss": 0.7957, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1568392926284152e-05, |
|
"loss": 0.7648, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1472553507147705e-05, |
|
"loss": 0.7709, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1376714088011256e-05, |
|
"loss": 0.7995, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1280874668874808e-05, |
|
"loss": 0.7712, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1185035249738359e-05, |
|
"loss": 0.7739, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1089195830601911e-05, |
|
"loss": 0.778, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0993356411465462e-05, |
|
"loss": 0.787, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0897516992329014e-05, |
|
"loss": 0.7862, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0801677573192565e-05, |
|
"loss": 0.7814, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0705838154056117e-05, |
|
"loss": 0.7752, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0609998734919668e-05, |
|
"loss": 0.7893, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0514159315783218e-05, |
|
"loss": 0.774, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.041831989664677e-05, |
|
"loss": 0.7731, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0322480477510323e-05, |
|
"loss": 0.7874, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0226641058373874e-05, |
|
"loss": 0.7818, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0130801639237424e-05, |
|
"loss": 0.7946, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.0034962220100977e-05, |
|
"loss": 0.7647, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.939122800964529e-06, |
|
"loss": 0.7739, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.84328338182808e-06, |
|
"loss": 0.7721, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.74744396269163e-06, |
|
"loss": 0.765, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.651604543555183e-06, |
|
"loss": 0.7783, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.555765124418735e-06, |
|
"loss": 0.7697, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.459925705282286e-06, |
|
"loss": 0.7731, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.364086286145836e-06, |
|
"loss": 0.7833, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.268246867009389e-06, |
|
"loss": 0.7816, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.172407447872941e-06, |
|
"loss": 0.7775, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.076568028736492e-06, |
|
"loss": 0.77, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.980728609600044e-06, |
|
"loss": 0.7914, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.884889190463595e-06, |
|
"loss": 0.7726, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.789049771327146e-06, |
|
"loss": 0.7743, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.693210352190698e-06, |
|
"loss": 0.7877, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.59737093305425e-06, |
|
"loss": 0.7607, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.501531513917801e-06, |
|
"loss": 0.779, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.405692094781352e-06, |
|
"loss": 0.7781, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.309852675644904e-06, |
|
"loss": 0.7649, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.214013256508456e-06, |
|
"loss": 0.7554, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.118173837372007e-06, |
|
"loss": 0.7413, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.022334418235558e-06, |
|
"loss": 0.7772, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.92649499909911e-06, |
|
"loss": 0.7876, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.830655579962662e-06, |
|
"loss": 0.7572, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.734816160826213e-06, |
|
"loss": 0.7536, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.638976741689764e-06, |
|
"loss": 0.7587, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.543137322553316e-06, |
|
"loss": 0.7718, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.447297903416868e-06, |
|
"loss": 0.77, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.351458484280418e-06, |
|
"loss": 0.7493, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.255619065143971e-06, |
|
"loss": 0.7522, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.159779646007522e-06, |
|
"loss": 0.7714, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.063940226871073e-06, |
|
"loss": 0.7682, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.968100807734624e-06, |
|
"loss": 0.7702, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.872261388598177e-06, |
|
"loss": 0.7636, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.776421969461728e-06, |
|
"loss": 0.767, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.680582550325279e-06, |
|
"loss": 0.7633, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.5847431311888304e-06, |
|
"loss": 0.7539, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.488903712052383e-06, |
|
"loss": 0.7555, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.3930642929159335e-06, |
|
"loss": 0.7587, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.297224873779485e-06, |
|
"loss": 0.7676, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.201385454643037e-06, |
|
"loss": 0.7547, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.105546035506588e-06, |
|
"loss": 0.7619, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.00970661637014e-06, |
|
"loss": 0.7683, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.913867197233691e-06, |
|
"loss": 0.7539, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.818027778097243e-06, |
|
"loss": 0.7586, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.722188358960794e-06, |
|
"loss": 0.7735, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.626348939824346e-06, |
|
"loss": 0.7663, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.530509520687897e-06, |
|
"loss": 0.7616, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.434670101551449e-06, |
|
"loss": 0.7814, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.338830682415e-06, |
|
"loss": 0.7697, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.242991263278552e-06, |
|
"loss": 0.7387, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.147151844142103e-06, |
|
"loss": 0.7636, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.051312425005655e-06, |
|
"loss": 0.7659, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.955473005869206e-06, |
|
"loss": 0.7564, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.859633586732758e-06, |
|
"loss": 0.7589, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.763794167596309e-06, |
|
"loss": 0.7683, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.667954748459861e-06, |
|
"loss": 0.7556, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.572115329323412e-06, |
|
"loss": 0.7479, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.476275910186964e-06, |
|
"loss": 0.7548, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.380436491050515e-06, |
|
"loss": 0.7592, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.284597071914067e-06, |
|
"loss": 0.7542, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.188757652777618e-06, |
|
"loss": 0.773, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.09291823364117e-06, |
|
"loss": 0.7506, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.997078814504721e-06, |
|
"loss": 0.7489, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.901239395368272e-06, |
|
"loss": 0.738, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.8053999762318244e-06, |
|
"loss": 0.7563, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.7095605570953755e-06, |
|
"loss": 0.7671, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6137211379589274e-06, |
|
"loss": 0.7705, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5178817188224785e-06, |
|
"loss": 0.7552, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4220422996860304e-06, |
|
"loss": 0.7537, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3262028805495815e-06, |
|
"loss": 0.7624, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.2303634614131335e-06, |
|
"loss": 0.7334, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1345240422766846e-06, |
|
"loss": 0.7669, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.038684623140236e-06, |
|
"loss": 0.7512, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9428452040037876e-06, |
|
"loss": 0.7587, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8470057848673395e-06, |
|
"loss": 0.7635, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.751166365730891e-06, |
|
"loss": 0.746, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.655326946594442e-06, |
|
"loss": 0.7512, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5594875274579937e-06, |
|
"loss": 0.758, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.463648108321545e-06, |
|
"loss": 0.7662, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3678086891850967e-06, |
|
"loss": 0.7571, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.271969270048648e-06, |
|
"loss": 0.7453, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1761298509121997e-06, |
|
"loss": 0.7494, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0802904317757512e-06, |
|
"loss": 0.7574, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.9844510126393028e-06, |
|
"loss": 0.763, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8886115935028543e-06, |
|
"loss": 0.7713, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.7927721743664056e-06, |
|
"loss": 0.738, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.696932755229957e-06, |
|
"loss": 0.7483, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6010933360935086e-06, |
|
"loss": 0.7458, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5052539169570601e-06, |
|
"loss": 0.7664, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.4094144978206116e-06, |
|
"loss": 0.7516, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3135750786841631e-06, |
|
"loss": 0.7628, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2177356595477147e-06, |
|
"loss": 0.7455, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1218962404112662e-06, |
|
"loss": 0.7601, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0260568212748177e-06, |
|
"loss": 0.7471, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.302174021383692e-07, |
|
"loss": 0.7396, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.343779830019207e-07, |
|
"loss": 0.7382, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.385385638654721e-07, |
|
"loss": 0.7501, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.426991447290237e-07, |
|
"loss": 0.7557, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.468597255925752e-07, |
|
"loss": 0.7628, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.5102030645612663e-07, |
|
"loss": 0.752, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.5518088731967815e-07, |
|
"loss": 0.7496, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.5934146818322966e-07, |
|
"loss": 0.7588, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.6350204904678115e-07, |
|
"loss": 0.7423, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.766262991033264e-08, |
|
"loss": 0.7348, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 260853, |
|
"total_flos": 3.184114652036137e+17, |
|
"train_loss": 0.9910313752351659, |
|
"train_runtime": 64882.6806, |
|
"train_samples_per_second": 40.204, |
|
"train_steps_per_second": 4.02 |
|
} |
|
], |
|
"max_steps": 260853, |
|
"num_train_epochs": 3, |
|
"total_flos": 3.184114652036137e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |