{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 200220,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.9875137348916196e-05, "loss": 3.7531, "step": 500 },
    { "epoch": 0.01, "learning_rate": 4.975027469783239e-05, "loss": 2.9041, "step": 1000 },
    { "epoch": 0.02, "learning_rate": 4.962541204674858e-05, "loss": 2.5413, "step": 1500 },
    { "epoch": 0.03, "learning_rate": 4.950054939566477e-05, "loss": 2.3266, "step": 2000 },
    { "epoch": 0.04, "learning_rate": 4.937568674458096e-05, "loss": 2.1497, "step": 2500 },
    { "epoch": 0.04, "learning_rate": 4.9250824093497155e-05, "loss": 2.0392, "step": 3000 },
    { "epoch": 0.05, "learning_rate": 4.912596144241335e-05, "loss": 1.9413, "step": 3500 },
    { "epoch": 0.06, "learning_rate": 4.900109879132954e-05, "loss": 1.8698, "step": 4000 },
    { "epoch": 0.07, "learning_rate": 4.8876236140245735e-05, "loss": 1.8197, "step": 4500 },
    { "epoch": 0.07, "learning_rate": 4.875137348916192e-05, "loss": 1.7706, "step": 5000 },
    { "epoch": 0.08, "learning_rate": 4.8626510838078115e-05, "loss": 1.6978, "step": 5500 },
    { "epoch": 0.09, "learning_rate": 4.850164818699431e-05, "loss": 1.6654, "step": 6000 },
    { "epoch": 0.1, "learning_rate": 4.83767855359105e-05, "loss": 1.6313, "step": 6500 },
    { "epoch": 0.1, "learning_rate": 4.8251922884826695e-05, "loss": 1.6022, "step": 7000 },
    { "epoch": 0.11, "learning_rate": 4.812706023374289e-05, "loss": 1.558, "step": 7500 },
    { "epoch": 0.12, "learning_rate": 4.8002197582659075e-05, "loss": 1.5389, "step": 8000 },
    { "epoch": 0.13, "learning_rate": 4.787733493157527e-05, "loss": 1.5257, "step": 8500 },
    { "epoch": 0.13, "learning_rate": 4.775247228049146e-05, "loss": 1.5091, "step": 9000 },
    { "epoch": 0.14, "learning_rate": 4.7627609629407655e-05, "loss": 1.4909, "step": 9500 },
    { "epoch": 0.15, "learning_rate": 4.750274697832385e-05, "loss": 1.479, "step": 10000 },
    { "epoch": 0.16, "learning_rate": 4.737788432724004e-05, "loss": 1.4322, "step": 10500 },
    { "epoch": 0.16, "learning_rate": 4.725302167615623e-05, "loss": 1.4256, "step": 11000 },
    { "epoch": 0.17, "learning_rate": 4.712815902507242e-05, "loss": 1.4211, "step": 11500 },
    { "epoch": 0.18, "learning_rate": 4.7003296373988614e-05, "loss": 1.3964, "step": 12000 },
    { "epoch": 0.19, "learning_rate": 4.687843372290481e-05, "loss": 1.3892, "step": 12500 },
    { "epoch": 0.19, "learning_rate": 4.6753571071821e-05, "loss": 1.3777, "step": 13000 },
    { "epoch": 0.2, "learning_rate": 4.6628708420737194e-05, "loss": 1.3917, "step": 13500 },
    { "epoch": 0.21, "learning_rate": 4.650384576965338e-05, "loss": 1.3443, "step": 14000 },
    { "epoch": 0.22, "learning_rate": 4.6378983118569574e-05, "loss": 1.3544, "step": 14500 },
    { "epoch": 0.22, "learning_rate": 4.625412046748577e-05, "loss": 1.3199, "step": 15000 },
    { "epoch": 0.23, "learning_rate": 4.612925781640196e-05, "loss": 1.3102, "step": 15500 },
    { "epoch": 0.24, "learning_rate": 4.6004395165318154e-05, "loss": 1.3007, "step": 16000 },
    { "epoch": 0.25, "learning_rate": 4.587953251423435e-05, "loss": 1.3053, "step": 16500 },
    { "epoch": 0.25, "learning_rate": 4.5754669863150534e-05, "loss": 1.2832, "step": 17000 },
    { "epoch": 0.26, "learning_rate": 4.562980721206673e-05, "loss": 1.2878, "step": 17500 },
    { "epoch": 0.27, "learning_rate": 4.550494456098292e-05, "loss": 1.2722, "step": 18000 },
    { "epoch": 0.28, "learning_rate": 4.538008190989911e-05, "loss": 1.2699, "step": 18500 },
    { "epoch": 0.28, "learning_rate": 4.525521925881531e-05, "loss": 1.2651, "step": 19000 },
    { "epoch": 0.29, "learning_rate": 4.51303566077315e-05, "loss": 1.2586, "step": 19500 },
    { "epoch": 0.3, "learning_rate": 4.5005493956647686e-05, "loss": 1.2421, "step": 20000 },
    { "epoch": 0.31, "learning_rate": 4.488063130556388e-05, "loss": 1.2379, "step": 20500 },
    { "epoch": 0.31, "learning_rate": 4.475576865448007e-05, "loss": 1.2221, "step": 21000 },
    { "epoch": 0.32, "learning_rate": 4.4630906003396266e-05, "loss": 1.2206, "step": 21500 },
    { "epoch": 0.33, "learning_rate": 4.450604335231246e-05, "loss": 1.2095, "step": 22000 },
    { "epoch": 0.34, "learning_rate": 4.438118070122865e-05, "loss": 1.1966, "step": 22500 },
    { "epoch": 0.34, "learning_rate": 4.425631805014484e-05, "loss": 1.2054, "step": 23000 },
    { "epoch": 0.35, "learning_rate": 4.413145539906103e-05, "loss": 1.178, "step": 23500 },
    { "epoch": 0.36, "learning_rate": 4.4006592747977226e-05, "loss": 1.1836, "step": 24000 },
    { "epoch": 0.37, "learning_rate": 4.388173009689342e-05, "loss": 1.1883, "step": 24500 },
    { "epoch": 0.37, "learning_rate": 4.375686744580961e-05, "loss": 1.1954, "step": 25000 },
    { "epoch": 0.38, "learning_rate": 4.3632004794725806e-05, "loss": 1.177, "step": 25500 },
    { "epoch": 0.39, "learning_rate": 4.350714214364199e-05, "loss": 1.1746, "step": 26000 },
    { "epoch": 0.4, "learning_rate": 4.3382279492558186e-05, "loss": 1.1831, "step": 26500 },
    { "epoch": 0.4, "learning_rate": 4.325741684147438e-05, "loss": 1.1552, "step": 27000 },
    { "epoch": 0.41, "learning_rate": 4.313255419039057e-05, "loss": 1.1582, "step": 27500 },
    { "epoch": 0.42, "learning_rate": 4.3007691539306765e-05, "loss": 1.1679, "step": 28000 },
    { "epoch": 0.43, "learning_rate": 4.288282888822296e-05, "loss": 1.1695, "step": 28500 },
    { "epoch": 0.43, "learning_rate": 4.2757966237139145e-05, "loss": 1.1419, "step": 29000 },
    { "epoch": 0.44, "learning_rate": 4.263310358605534e-05, "loss": 1.1501, "step": 29500 },
    { "epoch": 0.45, "learning_rate": 4.250824093497153e-05, "loss": 1.1318, "step": 30000 },
    { "epoch": 0.46, "learning_rate": 4.2383378283887725e-05, "loss": 1.124, "step": 30500 },
    { "epoch": 0.46, "learning_rate": 4.225851563280392e-05, "loss": 1.1139, "step": 31000 },
    { "epoch": 0.47, "learning_rate": 4.213365298172011e-05, "loss": 1.1172, "step": 31500 },
    { "epoch": 0.48, "learning_rate": 4.20087903306363e-05, "loss": 1.1086, "step": 32000 },
    { "epoch": 0.49, "learning_rate": 4.188392767955249e-05, "loss": 1.1248, "step": 32500 },
    { "epoch": 0.49, "learning_rate": 4.1759065028468685e-05, "loss": 1.1001, "step": 33000 },
    { "epoch": 0.5, "learning_rate": 4.163420237738488e-05, "loss": 1.1074, "step": 33500 },
    { "epoch": 0.51, "learning_rate": 4.150933972630107e-05, "loss": 1.0772, "step": 34000 },
    { "epoch": 0.52, "learning_rate": 4.1384477075217265e-05, "loss": 1.0942, "step": 34500 },
    { "epoch": 0.52, "learning_rate": 4.125961442413345e-05, "loss": 1.1072, "step": 35000 },
    { "epoch": 0.53, "learning_rate": 4.1134751773049644e-05, "loss": 1.0815, "step": 35500 },
    { "epoch": 0.54, "learning_rate": 4.100988912196584e-05, "loss": 1.0693, "step": 36000 },
    { "epoch": 0.55, "learning_rate": 4.088502647088203e-05, "loss": 1.0837, "step": 36500 },
    { "epoch": 0.55, "learning_rate": 4.0760163819798224e-05, "loss": 1.0951, "step": 37000 },
    { "epoch": 0.56, "learning_rate": 4.063530116871442e-05, "loss": 1.0666, "step": 37500 },
    { "epoch": 0.57, "learning_rate": 4.0510438517630604e-05, "loss": 1.0808, "step": 38000 },
    { "epoch": 0.58, "learning_rate": 4.03855758665468e-05, "loss": 1.075, "step": 38500 },
    { "epoch": 0.58, "learning_rate": 4.026071321546299e-05, "loss": 1.0683, "step": 39000 },
    { "epoch": 0.59, "learning_rate": 4.0135850564379184e-05, "loss": 1.0633, "step": 39500 },
    { "epoch": 0.6, "learning_rate": 4.001098791329538e-05, "loss": 1.0814, "step": 40000 },
    { "epoch": 0.61, "learning_rate": 3.988612526221157e-05, "loss": 1.0485, "step": 40500 },
    { "epoch": 0.61, "learning_rate": 3.976126261112776e-05, "loss": 1.0483, "step": 41000 },
    { "epoch": 0.62, "learning_rate": 3.963639996004395e-05, "loss": 1.0656, "step": 41500 },
    { "epoch": 0.63, "learning_rate": 3.9511537308960144e-05, "loss": 1.07, "step": 42000 },
    { "epoch": 0.64, "learning_rate": 3.938667465787634e-05, "loss": 1.0441, "step": 42500 },
    { "epoch": 0.64, "learning_rate": 3.926181200679253e-05, "loss": 1.0494, "step": 43000 },
    { "epoch": 0.65, "learning_rate": 3.9136949355708724e-05, "loss": 1.0482, "step": 43500 },
    { "epoch": 0.66, "learning_rate": 3.901208670462492e-05, "loss": 1.065, "step": 44000 },
    { "epoch": 0.67, "learning_rate": 3.88872240535411e-05, "loss": 1.0641, "step": 44500 },
    { "epoch": 0.67, "learning_rate": 3.8762361402457297e-05, "loss": 1.0439, "step": 45000 },
    { "epoch": 0.68, "learning_rate": 3.863749875137349e-05, "loss": 1.0377, "step": 45500 },
    { "epoch": 0.69, "learning_rate": 3.851263610028968e-05, "loss": 1.0256, "step": 46000 },
    { "epoch": 0.7, "learning_rate": 3.8387773449205876e-05, "loss": 1.0327, "step": 46500 },
    { "epoch": 0.7, "learning_rate": 3.826291079812207e-05, "loss": 1.0272, "step": 47000 },
    { "epoch": 0.71, "learning_rate": 3.8138048147038256e-05, "loss": 1.0348, "step": 47500 },
    { "epoch": 0.72, "learning_rate": 3.801318549595445e-05, "loss": 1.0194, "step": 48000 },
    { "epoch": 0.73, "learning_rate": 3.788832284487064e-05, "loss": 1.0178, "step": 48500 },
    { "epoch": 0.73, "learning_rate": 3.7763460193786836e-05, "loss": 1.0144, "step": 49000 },
    { "epoch": 0.74, "learning_rate": 3.763859754270303e-05, "loss": 1.0073, "step": 49500 },
    { "epoch": 0.75, "learning_rate": 3.751373489161922e-05, "loss": 1.0012, "step": 50000 },
    { "epoch": 0.76, "learning_rate": 3.738887224053541e-05, "loss": 1.0055, "step": 50500 },
    { "epoch": 0.76, "learning_rate": 3.72640095894516e-05, "loss": 1.0066, "step": 51000 },
    { "epoch": 0.77, "learning_rate": 3.7139146938367796e-05, "loss": 1.0111, "step": 51500 },
    { "epoch": 0.78, "learning_rate": 3.701428428728399e-05, "loss": 1.0177, "step": 52000 },
    { "epoch": 0.79, "learning_rate": 3.688942163620018e-05, "loss": 1.0059, "step": 52500 },
    { "epoch": 0.79, "learning_rate": 3.6764558985116376e-05, "loss": 0.9872, "step": 53000 },
    { "epoch": 0.8, "learning_rate": 3.663969633403256e-05, "loss": 1.0135, "step": 53500 },
    { "epoch": 0.81, "learning_rate": 3.6514833682948755e-05, "loss": 0.9899, "step": 54000 },
    { "epoch": 0.82, "learning_rate": 3.638997103186495e-05, "loss": 0.9918, "step": 54500 },
    { "epoch": 0.82, "learning_rate": 3.626510838078114e-05, "loss": 1.0154, "step": 55000 },
    { "epoch": 0.83, "learning_rate": 3.6140245729697335e-05, "loss": 0.9957, "step": 55500 },
    { "epoch": 0.84, "learning_rate": 3.601538307861353e-05, "loss": 0.9839, "step": 56000 },
    { "epoch": 0.85, "learning_rate": 3.5890520427529715e-05, "loss": 0.999, "step": 56500 },
    { "epoch": 0.85, "learning_rate": 3.576565777644591e-05, "loss": 0.9836, "step": 57000 },
    { "epoch": 0.86, "learning_rate": 3.56407951253621e-05, "loss": 0.9774, "step": 57500 },
    { "epoch": 0.87, "learning_rate": 3.5515932474278295e-05, "loss": 0.9709, "step": 58000 },
    { "epoch": 0.88, "learning_rate": 3.539106982319449e-05, "loss": 0.985, "step": 58500 },
    { "epoch": 0.88, "learning_rate": 3.526620717211068e-05, "loss": 0.977, "step": 59000 },
    { "epoch": 0.89, "learning_rate": 3.514134452102687e-05, "loss": 0.9578, "step": 59500 },
    { "epoch": 0.9, "learning_rate": 3.501648186994306e-05, "loss": 0.964, "step": 60000 },
    { "epoch": 0.91, "learning_rate": 3.4891619218859255e-05, "loss": 0.9739, "step": 60500 },
    { "epoch": 0.91, "learning_rate": 3.476675656777545e-05, "loss": 0.9647, "step": 61000 },
    { "epoch": 0.92, "learning_rate": 3.464189391669164e-05, "loss": 0.9653, "step": 61500 },
    { "epoch": 0.93, "learning_rate": 3.4517031265607834e-05, "loss": 0.9643, "step": 62000 },
    { "epoch": 0.94, "learning_rate": 3.439216861452402e-05, "loss": 0.9644, "step": 62500 },
    { "epoch": 0.94, "learning_rate": 3.4267305963440214e-05, "loss": 0.9384, "step": 63000 },
    { "epoch": 0.95, "learning_rate": 3.414244331235641e-05, "loss": 0.969, "step": 63500 },
    { "epoch": 0.96, "learning_rate": 3.40175806612726e-05, "loss": 0.9602, "step": 64000 },
    { "epoch": 0.97, "learning_rate": 3.3892718010188794e-05, "loss": 0.9549, "step": 64500 },
    { "epoch": 0.97, "learning_rate": 3.376785535910499e-05, "loss": 0.9607, "step": 65000 },
    { "epoch": 0.98, "learning_rate": 3.3642992708021174e-05, "loss": 0.9616, "step": 65500 },
    { "epoch": 0.99, "learning_rate": 3.351813005693737e-05, "loss": 0.9497, "step": 66000 },
    { "epoch": 1.0, "learning_rate": 3.339326740585356e-05, "loss": 0.9618, "step": 66500 },
    { "epoch": 1.0, "learning_rate": 3.3268404754769754e-05, "loss": 0.9013, "step": 67000 },
    { "epoch": 1.01, "learning_rate": 3.314354210368595e-05, "loss": 0.8594, "step": 67500 },
    { "epoch": 1.02, "learning_rate": 3.301867945260214e-05, "loss": 0.85, "step": 68000 },
    { "epoch": 1.03, "learning_rate": 3.289381680151833e-05, "loss": 0.8586, "step": 68500 },
    { "epoch": 1.03, "learning_rate": 3.276895415043452e-05, "loss": 0.864, "step": 69000 },
    { "epoch": 1.04, "learning_rate": 3.2644091499350713e-05, "loss": 0.8557, "step": 69500 },
    { "epoch": 1.05, "learning_rate": 3.251922884826691e-05, "loss": 0.8625, "step": 70000 },
    { "epoch": 1.06, "learning_rate": 3.23943661971831e-05, "loss": 0.8501, "step": 70500 },
    { "epoch": 1.06, "learning_rate": 3.226950354609929e-05, "loss": 0.8485, "step": 71000 },
    { "epoch": 1.07, "learning_rate": 3.214464089501548e-05, "loss": 0.8504, "step": 71500 },
    { "epoch": 1.08, "learning_rate": 3.201977824393167e-05, "loss": 0.8533, "step": 72000 },
    { "epoch": 1.09, "learning_rate": 3.1894915592847866e-05, "loss": 0.8528, "step": 72500 },
    { "epoch": 1.09, "learning_rate": 3.177005294176406e-05, "loss": 0.8327, "step": 73000 },
    { "epoch": 1.1, "learning_rate": 3.164519029068025e-05, "loss": 0.8454, "step": 73500 },
    { "epoch": 1.11, "learning_rate": 3.1520327639596446e-05, "loss": 0.8416, "step": 74000 },
    { "epoch": 1.12, "learning_rate": 3.139546498851263e-05, "loss": 0.8413, "step": 74500 },
    { "epoch": 1.12, "learning_rate": 3.1270602337428826e-05, "loss": 0.8443, "step": 75000 },
    { "epoch": 1.13, "learning_rate": 3.114573968634502e-05, "loss": 0.8576, "step": 75500 },
    { "epoch": 1.14, "learning_rate": 3.102087703526121e-05, "loss": 0.8517, "step": 76000 },
    { "epoch": 1.15, "learning_rate": 3.0896014384177406e-05, "loss": 0.8397, "step": 76500 },
    { "epoch": 1.15, "learning_rate": 3.07711517330936e-05, "loss": 0.8555, "step": 77000 },
    { "epoch": 1.16, "learning_rate": 3.0646289082009786e-05, "loss": 0.8403, "step": 77500 },
    { "epoch": 1.17, "learning_rate": 3.052142643092598e-05, "loss": 0.8531, "step": 78000 },
    { "epoch": 1.18, "learning_rate": 3.0396563779842176e-05, "loss": 0.8494, "step": 78500 },
    { "epoch": 1.18, "learning_rate": 3.027170112875837e-05, "loss": 0.8397, "step": 79000 },
    { "epoch": 1.19, "learning_rate": 3.014683847767456e-05, "loss": 0.8535, "step": 79500 },
    { "epoch": 1.2, "learning_rate": 3.0021975826590752e-05, "loss": 0.8388, "step": 80000 },
    { "epoch": 1.21, "learning_rate": 2.9897113175506942e-05, "loss": 0.8364, "step": 80500 },
    { "epoch": 1.21, "learning_rate": 2.9772250524423135e-05, "loss": 0.8333, "step": 81000 },
    { "epoch": 1.22, "learning_rate": 2.964738787333933e-05, "loss": 0.8416, "step": 81500 },
    { "epoch": 1.23, "learning_rate": 2.9522525222255522e-05, "loss": 0.8578, "step": 82000 },
    { "epoch": 1.24, "learning_rate": 2.9397662571171712e-05, "loss": 0.8357, "step": 82500 },
    { "epoch": 1.24, "learning_rate": 2.9272799920087905e-05, "loss": 0.8309, "step": 83000 },
    { "epoch": 1.25, "learning_rate": 2.9147937269004095e-05, "loss": 0.8379, "step": 83500 },
    { "epoch": 1.26, "learning_rate": 2.902307461792029e-05, "loss": 0.8419, "step": 84000 },
    { "epoch": 1.27, "learning_rate": 2.889821196683648e-05, "loss": 0.8317, "step": 84500 },
    { "epoch": 1.27, "learning_rate": 2.8773349315752675e-05, "loss": 0.8287, "step": 85000 },
    { "epoch": 1.28, "learning_rate": 2.8648486664668868e-05, "loss": 0.8308, "step": 85500 },
    { "epoch": 1.29, "learning_rate": 2.8523624013585058e-05, "loss": 0.8396, "step": 86000 },
    { "epoch": 1.3, "learning_rate": 2.839876136250125e-05, "loss": 0.8418, "step": 86500 },
    { "epoch": 1.3, "learning_rate": 2.827389871141744e-05, "loss": 0.8404, "step": 87000 },
    { "epoch": 1.31, "learning_rate": 2.8149036060333635e-05, "loss": 0.8323, "step": 87500 },
    { "epoch": 1.32, "learning_rate": 2.8024173409249828e-05, "loss": 0.8432, "step": 88000 },
    { "epoch": 1.33, "learning_rate": 2.789931075816602e-05, "loss": 0.8431, "step": 88500 },
    { "epoch": 1.33, "learning_rate": 2.777444810708221e-05, "loss": 0.8356, "step": 89000 },
    { "epoch": 1.34, "learning_rate": 2.7649585455998404e-05, "loss": 0.8334, "step": 89500 },
    { "epoch": 1.35, "learning_rate": 2.7524722804914594e-05, "loss": 0.8314, "step": 90000 },
    { "epoch": 1.36, "learning_rate": 2.7399860153830787e-05, "loss": 0.8383, "step": 90500 },
    { "epoch": 1.36, "learning_rate": 2.727499750274698e-05, "loss": 0.8346, "step": 91000 },
    { "epoch": 1.37, "learning_rate": 2.7150134851663174e-05, "loss": 0.8259, "step": 91500 },
    { "epoch": 1.38, "learning_rate": 2.7025272200579367e-05, "loss": 0.8377, "step": 92000 },
    { "epoch": 1.39, "learning_rate": 2.6900409549495557e-05, "loss": 0.8171, "step": 92500 },
    { "epoch": 1.39, "learning_rate": 2.6775546898411747e-05, "loss": 0.8252, "step": 93000 },
    { "epoch": 1.4, "learning_rate": 2.665068424732794e-05, "loss": 0.8242, "step": 93500 },
    { "epoch": 1.41, "learning_rate": 2.6525821596244134e-05, "loss": 0.8202, "step": 94000 },
    { "epoch": 1.42, "learning_rate": 2.6400958945160327e-05, "loss": 0.8229, "step": 94500 },
    { "epoch": 1.42, "learning_rate": 2.627609629407652e-05, "loss": 0.8258, "step": 95000 },
    { "epoch": 1.43, "learning_rate": 2.615123364299271e-05, "loss": 0.8152, "step": 95500 },
    { "epoch": 1.44, "learning_rate": 2.60263709919089e-05, "loss": 0.8174, "step": 96000 },
    { "epoch": 1.45, "learning_rate": 2.5901508340825093e-05, "loss": 0.8291, "step": 96500 },
    { "epoch": 1.45, "learning_rate": 2.5776645689741287e-05, "loss": 0.8151, "step": 97000 },
    { "epoch": 1.46, "learning_rate": 2.565178303865748e-05, "loss": 0.8144, "step": 97500 },
    { "epoch": 1.47, "learning_rate": 2.5526920387573673e-05, "loss": 0.8142, "step": 98000 },
    { "epoch": 1.48, "learning_rate": 2.5402057736489867e-05, "loss": 0.8134, "step": 98500 },
    { "epoch": 1.48, "learning_rate": 2.5277195085406053e-05, "loss": 0.8164, "step": 99000 },
    { "epoch": 1.49, "learning_rate": 2.5152332434322246e-05, "loss": 0.8202, "step": 99500 },
    { "epoch": 1.5, "learning_rate": 2.502746978323844e-05, "loss": 0.8164, "step": 100000 },
    { "epoch": 1.51, "learning_rate": 2.4902607132154633e-05, "loss": 0.8013, "step": 100500 },
    { "epoch": 1.51, "learning_rate": 2.4777744481070823e-05, "loss": 0.8098, "step": 101000 },
    { "epoch": 1.52, "learning_rate": 2.4652881829987016e-05, "loss": 0.8132, "step": 101500 },
    { "epoch": 1.53, "learning_rate": 2.452801917890321e-05, "loss": 0.8091, "step": 102000 },
    { "epoch": 1.54, "learning_rate": 2.44031565278194e-05, "loss": 0.8059, "step": 102500 },
    { "epoch": 1.54, "learning_rate": 2.4278293876735593e-05, "loss": 0.8035, "step": 103000 },
    { "epoch": 1.55, "learning_rate": 2.4153431225651786e-05, "loss": 0.8123, "step": 103500 },
    { "epoch": 1.56, "learning_rate": 2.4028568574567976e-05, "loss": 0.8141, "step": 104000 },
    { "epoch": 1.57, "learning_rate": 2.390370592348417e-05, "loss": 0.8042, "step": 104500 },
    { "epoch": 1.57, "learning_rate": 2.3778843272400362e-05, "loss": 0.8166, "step": 105000 },
    { "epoch": 1.58, "learning_rate": 2.3653980621316552e-05, "loss": 0.8021, "step": 105500 },
    { "epoch": 1.59, "learning_rate": 2.3529117970232746e-05, "loss": 0.798, "step": 106000 },
    { "epoch": 1.6, "learning_rate": 2.340425531914894e-05, "loss": 0.809, "step": 106500 },
    { "epoch": 1.6, "learning_rate": 2.3279392668065132e-05, "loss": 0.8161, "step": 107000 },
    { "epoch": 1.61, "learning_rate": 2.3154530016981322e-05, "loss": 0.8093, "step": 107500 },
    { "epoch": 1.62, "learning_rate": 2.3029667365897515e-05, "loss": 0.7977, "step": 108000 },
    { "epoch": 1.63, "learning_rate": 2.290480471481371e-05, "loss": 0.7985, "step": 108500 },
    { "epoch": 1.63, "learning_rate": 2.27799420637299e-05, "loss": 0.8036, "step": 109000 },
    { "epoch": 1.64, "learning_rate": 2.2655079412646092e-05, "loss": 0.7973, "step": 109500 },
    { "epoch": 1.65, "learning_rate": 2.2530216761562285e-05, "loss": 0.802, "step": 110000 },
    { "epoch": 1.66, "learning_rate": 2.2405354110478475e-05, "loss": 0.7877, "step": 110500 },
    { "epoch": 1.66, "learning_rate": 2.2280491459394668e-05, "loss": 0.8018, "step": 111000 },
    { "epoch": 1.67, "learning_rate": 2.215562880831086e-05, "loss": 0.7984, "step": 111500 },
    { "epoch": 1.68, "learning_rate": 2.203076615722705e-05, "loss": 0.794, "step": 112000 },
    { "epoch": 1.69, "learning_rate": 2.1905903506143245e-05, "loss": 0.7962, "step": 112500 },
    { "epoch": 1.69, "learning_rate": 2.1781040855059438e-05, "loss": 0.8085, "step": 113000 },
    { "epoch": 1.7, "learning_rate": 2.1656178203975628e-05, "loss": 0.7877, "step": 113500 },
    { "epoch": 1.71, "learning_rate": 2.153131555289182e-05, "loss": 0.7943, "step": 114000 },
    { "epoch": 1.72, "learning_rate": 2.1406452901808014e-05, "loss": 0.7982, "step": 114500 },
    { "epoch": 1.72, "learning_rate": 2.1281590250724204e-05, "loss": 0.8015, "step": 115000 },
    { "epoch": 1.73, "learning_rate": 2.1156727599640398e-05, "loss": 0.793, "step": 115500 },
    { "epoch": 1.74, "learning_rate": 2.103186494855659e-05, "loss": 0.7978, "step": 116000 },
    { "epoch": 1.75, "learning_rate": 2.090700229747278e-05, "loss": 0.796, "step": 116500 },
    { "epoch": 1.75, "learning_rate": 2.0782139646388974e-05, "loss": 0.7805, "step": 117000 },
    { "epoch": 1.76, "learning_rate": 2.0657276995305167e-05, "loss": 0.7859, "step": 117500 },
    { "epoch": 1.77, "learning_rate": 2.0532414344221357e-05, "loss": 0.7747, "step": 118000 },
    { "epoch": 1.78, "learning_rate": 2.040755169313755e-05, "loss": 0.7859, "step": 118500 },
    { "epoch": 1.78, "learning_rate": 2.0282689042053744e-05, "loss": 0.8023, "step": 119000 },
    { "epoch": 1.79, "learning_rate": 2.0157826390969934e-05, "loss": 0.7787, "step": 119500 },
    { "epoch": 1.8, "learning_rate": 2.0032963739886127e-05, "loss": 0.7812, "step": 120000 },
    { "epoch": 1.81, "learning_rate": 1.990810108880232e-05, "loss": 0.783, "step": 120500 },
    { "epoch": 1.81, "learning_rate": 1.978323843771851e-05, "loss": 0.7944, "step": 121000 },
    { "epoch": 1.82, "learning_rate": 1.9658375786634704e-05, "loss": 0.772, "step": 121500 },
    { "epoch": 1.83, "learning_rate": 1.9533513135550897e-05, "loss": 0.7822, "step": 122000 },
    { "epoch": 1.84, "learning_rate": 1.9408650484467087e-05, "loss": 0.7723, "step": 122500 },
    { "epoch": 1.84, "learning_rate": 1.928378783338328e-05, "loss": 0.7754, "step": 123000 },
    { "epoch": 1.85, "learning_rate": 1.9158925182299473e-05, "loss": 0.7796, "step": 123500 },
    { "epoch": 1.86, "learning_rate": 1.9034062531215663e-05, "loss": 0.7674, "step": 124000 },
    { "epoch": 1.87, "learning_rate": 1.8909199880131856e-05, "loss": 0.7939, "step": 124500 },
    { "epoch": 1.87, "learning_rate": 1.878433722904805e-05, "loss": 0.7741, "step": 125000 },
    { "epoch": 1.88, "learning_rate": 1.865947457796424e-05, "loss": 0.7817, "step": 125500 },
    { "epoch": 1.89, "learning_rate": 1.8534611926880433e-05, "loss": 0.7828, "step": 126000 },
    { "epoch": 1.9, "learning_rate": 1.8409749275796626e-05, "loss": 0.7731, "step": 126500 },
    { "epoch": 1.9, "learning_rate": 1.8284886624712816e-05, "loss": 0.7683, "step": 127000 },
    { "epoch": 1.91, "learning_rate": 1.816002397362901e-05, "loss": 0.7797, "step": 127500 },
    { "epoch": 1.92, "learning_rate": 1.8035161322545203e-05, "loss": 0.7831, "step": 128000 },
    { "epoch": 1.93, "learning_rate": 1.7910298671461393e-05, "loss": 0.7736, "step": 128500 },
    { "epoch": 1.93, "learning_rate": 1.7785436020377586e-05, "loss": 0.77, "step": 129000 },
    { "epoch": 1.94, "learning_rate": 1.766057336929378e-05, "loss": 0.7739, "step": 129500 },
    { "epoch": 1.95, "learning_rate": 1.753571071820997e-05, "loss": 0.7743, "step": 130000 },
    { "epoch": 1.96, "learning_rate": 1.7410848067126162e-05, "loss": 0.7706, "step": 130500 },
    { "epoch": 1.96, "learning_rate": 1.7285985416042356e-05, "loss": 0.7727, "step": 131000 },
    { "epoch": 1.97, "learning_rate": 1.7161122764958546e-05, "loss": 0.7736, "step": 131500 },
    { "epoch": 1.98, "learning_rate": 1.703626011387474e-05, "loss": 0.7618, "step": 132000 },
    { "epoch": 1.99, "learning_rate": 1.6911397462790932e-05, "loss": 0.7685, "step": 132500 },
    { "epoch": 1.99, "learning_rate": 1.6786534811707122e-05, "loss": 0.7821, "step": 133000 },
    { "epoch": 2.0, "learning_rate": 1.6661672160623315e-05, "loss": 0.7712, "step": 133500 },
    { "epoch": 2.01, "learning_rate": 1.653680950953951e-05, "loss": 0.6751, "step": 134000 },
    { "epoch": 2.02, "learning_rate": 1.64119468584557e-05, "loss": 0.6709, "step": 134500 },
    { "epoch": 2.02, "learning_rate": 1.6287084207371892e-05, "loss": 0.6627, "step": 135000 },
    { "epoch": 2.03, "learning_rate": 1.6162221556288085e-05, "loss": 0.6725, "step": 135500 },
    { "epoch": 2.04, "learning_rate": 1.6037358905204275e-05, "loss": 0.672, "step": 136000 },
    { "epoch": 2.05, "learning_rate": 1.5912496254120468e-05, "loss": 0.6679, "step": 136500 },
    { "epoch": 2.05, "learning_rate": 1.578763360303666e-05, "loss": 0.6662, "step": 137000 },
    { "epoch": 2.06, "learning_rate": 1.566277095195285e-05, "loss": 0.673, "step": 137500 },
    { "epoch": 2.07, "learning_rate": 1.5537908300869045e-05, "loss": 0.6771, "step": 138000 },
    { "epoch": 2.08, "learning_rate": 1.5413045649785238e-05, "loss": 0.6617, "step": 138500 },
    { "epoch": 2.08, "learning_rate": 1.5288182998701428e-05, "loss": 0.6749, "step": 139000 },
    { "epoch": 2.09, "learning_rate": 1.5163320347617621e-05, "loss": 0.6715, "step": 139500 },
    { "epoch": 2.1, "learning_rate": 1.5038457696533815e-05, "loss": 0.6622, "step": 140000 },
    { "epoch": 2.11, "learning_rate": 1.4913595045450004e-05, "loss": 0.6756, "step": 140500 },
    { "epoch": 2.11, "learning_rate": 1.4788732394366198e-05, "loss": 0.6785, "step": 141000 },
    { "epoch": 2.12, "learning_rate": 1.4663869743282391e-05, "loss": 0.6577, "step": 141500 },
    { "epoch": 2.13, "learning_rate": 1.4539007092198581e-05, "loss": 0.6629, "step": 142000 },
    { "epoch": 2.14, "learning_rate": 1.4414144441114774e-05, "loss": 0.6707, "step": 142500 },
    { "epoch": 2.14, "learning_rate": 1.4289281790030967e-05, "loss": 0.6603, "step": 143000 },
    { "epoch": 2.15, "learning_rate": 1.4164419138947157e-05, "loss": 0.6583, "step": 143500 },
    { "epoch": 2.16, "learning_rate": 1.403955648786335e-05, "loss": 0.6674, "step": 144000 },
    { "epoch": 2.17, "learning_rate": 1.3914693836779544e-05, "loss": 0.6591, "step": 144500 },
    { "epoch": 2.17, "learning_rate": 1.3789831185695734e-05, "loss": 0.6607, "step": 145000 },
    { "epoch": 2.18, "learning_rate": 1.3664968534611927e-05, "loss": 0.6615, "step": 145500 },
    { "epoch": 2.19, "learning_rate": 1.354010588352812e-05, "loss": 0.6635, "step": 146000 },
    { "epoch": 2.2, "learning_rate": 1.341524323244431e-05, "loss": 0.6647, "step": 146500 },
    { "epoch": 2.2, "learning_rate": 1.3290380581360504e-05, "loss": 0.6744, "step": 147000 },
    { "epoch": 2.21, "learning_rate": 1.3165517930276697e-05, "loss": 0.6636, "step": 147500 },
    { "epoch": 2.22, "learning_rate": 1.3040655279192887e-05, "loss": 0.6746, "step": 148000 },
    { "epoch": 2.23, "learning_rate": 1.291579262810908e-05, "loss": 0.6613, "step": 148500 },
    { "epoch": 2.23, "learning_rate": 1.2790929977025273e-05, "loss": 0.6655, "step": 149000 },
    { "epoch": 2.24, "learning_rate": 1.2666067325941463e-05, "loss": 0.6717, "step": 149500 },
    { "epoch": 2.25, "learning_rate": 1.2541204674857657e-05, "loss": 0.6482, "step": 150000 },
    { "epoch": 2.26, "learning_rate": 1.241634202377385e-05, "loss": 0.655, "step": 150500 },
    { "epoch": 2.26, "learning_rate": 1.2291479372690041e-05, "loss": 0.6633, "step": 151000 },
    { "epoch": 2.27, "learning_rate": 1.2166616721606233e-05, "loss": 0.6691, "step": 151500 },
    { "epoch": 2.28, "learning_rate": 1.2041754070522426e-05, "loss": 0.6635, "step": 152000 },
    { "epoch": 2.28, "learning_rate": 1.1916891419438618e-05, "loss": 0.6657, "step": 152500 },
    { "epoch": 2.29, "learning_rate": 1.179202876835481e-05, "loss": 0.662, "step": 153000 },
    { "epoch": 2.3, "learning_rate": 1.1667166117271003e-05, "loss": 0.6477, "step": 153500 },
    { "epoch": 2.31, "learning_rate": 1.1542303466187194e-05, "loss": 0.6649, "step": 154000 },
    { "epoch": 2.31, "learning_rate": 1.1417440815103386e-05, "loss": 0.6545, "step": 154500 },
    { "epoch": 2.32, "learning_rate": 1.129257816401958e-05, "loss": 0.6579, "step": 155000 },
    { "epoch": 2.33, "learning_rate": 1.116771551293577e-05, "loss": 0.6722, "step": 155500 },
    { "epoch": 2.34, "learning_rate": 1.1042852861851962e-05, "loss": 0.6594, "step": 156000 },
    { "epoch": 2.34, "learning_rate": 1.0917990210768156e-05, "loss": 0.6604, "step": 156500 },
    { "epoch": 2.35, "learning_rate": 1.0793127559684347e-05, "loss": 0.6547, "step": 157000 },
    { "epoch": 2.36, "learning_rate": 1.0668264908600539e-05, "loss": 0.6678, "step": 157500 },
    { "epoch": 2.37, "learning_rate": 1.0543402257516732e-05, "loss": 0.6547, "step": 158000 },
    { "epoch": 2.37, "learning_rate": 1.0418539606432924e-05, "loss": 0.6576, "step": 158500 },
    { "epoch": 2.38, "learning_rate": 1.0293676955349115e-05, "loss": 0.6642, "step": 159000 },
    { "epoch": 2.39, "learning_rate": 1.0168814304265309e-05, "loss": 0.6558, "step": 159500 },
    { "epoch": 2.4, "learning_rate": 1.00439516531815e-05, "loss": 0.6534, "step": 160000 },
    { "epoch": 2.4, "learning_rate": 9.919089002097694e-06, "loss": 0.6602, "step": 160500 },
    { "epoch": 2.41, "learning_rate": 9.794226351013885e-06, "loss": 0.6471, "step": 161000 },
    { "epoch": 2.42, "learning_rate": 9.669363699930077e-06, "loss": 0.6636, "step": 161500 },
    { "epoch": 2.43, "learning_rate": 9.54450104884627e-06, "loss": 0.6533, "step": 162000 },
    { "epoch": 2.43, "learning_rate": 9.419638397762462e-06, "loss": 0.6559, "step": 162500 },
    { "epoch": 2.44, "learning_rate": 9.294775746678653e-06, "loss": 0.6645, "step": 163000 },
    { "epoch": 2.45, "learning_rate": 9.169913095594847e-06, "loss": 0.6521, "step": 163500 },
    { "epoch": 2.46, "learning_rate": 9.045050444511038e-06, "loss": 0.6454, "step": 164000 },
    { "epoch": 2.46, "learning_rate": 8.92018779342723e-06, "loss": 0.6482, "step": 164500 },
    { "epoch": 2.47, "learning_rate": 8.795325142343423e-06, "loss": 0.6569, "step": 165000 },
    { "epoch": 2.48, "learning_rate": 8.670462491259615e-06, "loss": 0.6583, "step": 165500 },
    { "epoch": 2.49, "learning_rate": 8.545599840175806e-06, "loss": 0.6541, "step": 166000 },
    { "epoch": 2.49, "learning_rate": 8.420737189092e-06, "loss": 0.649, "step": 166500 },
    { "epoch": 2.5, "learning_rate": 8.295874538008191e-06, "loss": 0.6499, "step": 167000 },
    { "epoch": 2.51, "learning_rate": 8.171011886924383e-06, "loss": 0.6679, "step": 167500 },
    { "epoch": 2.52, "learning_rate": 8.046149235840576e-06, "loss": 0.6361, "step": 168000 },
    { "epoch": 2.52, "learning_rate": 7.921286584756768e-06, "loss": 0.6512, "step": 168500 },
    { "epoch": 2.53, "learning_rate": 7.796423933672959e-06, "loss": 0.6354, "step": 169000 },
    { "epoch": 2.54, "learning_rate": 7.671561282589152e-06, "loss": 0.6456, "step": 169500 },
    { "epoch": 2.55, "learning_rate": 7.546698631505345e-06, "loss": 0.6523, "step": 170000 },
    { "epoch": 2.55, "learning_rate": 7.4218359804215364e-06, "loss": 0.6512, "step": 170500 },
    { "epoch": 2.56, "learning_rate": 7.29697332933773e-06, "loss": 0.6402, "step": 171000 },
    { "epoch": 2.57, "learning_rate": 7.172110678253921e-06, "loss": 0.6648, "step": 171500 },
    { "epoch": 2.58, "learning_rate": 7.047248027170113e-06, "loss": 0.6544, "step": 172000 },
    { "epoch": 2.58, "learning_rate": 6.922385376086306e-06, "loss": 0.6548, "step": 172500 },
    { "epoch": 2.59, "learning_rate": 6.797522725002498e-06, "loss": 0.6477, "step": 173000 },
    { "epoch": 2.6, "learning_rate": 6.672660073918689e-06, "loss": 0.64, "step": 173500 },
    { "epoch": 2.61, "learning_rate": 6.547797422834883e-06, "loss": 0.6502, "step": 174000 },
    { "epoch": 2.61, "learning_rate": 6.422934771751074e-06, "loss": 0.6518, "step": 174500 },
    { "epoch": 2.62, "learning_rate": 6.298072120667266e-06, "loss": 0.6559, "step": 175000 },
    { "epoch": 2.63, "learning_rate": 6.173209469583458e-06, "loss": 0.6492, "step": 175500 },
    { "epoch": 2.64, "learning_rate": 6.048346818499651e-06, "loss": 0.6395, "step": 176000 },
    { "epoch": 2.64, "learning_rate": 5.923484167415843e-06, "loss": 0.6527, "step": 176500 },
    { "epoch": 2.65, "learning_rate": 5.798621516332036e-06, "loss": 0.6505, "step": 177000 },
    { "epoch": 2.66, "learning_rate": 5.673758865248227e-06, "loss": 0.6385, "step": 177500 },
    { "epoch": 2.67, "learning_rate": 5.54889621416442e-06, "loss": 0.6353, "step": 178000 },
    { "epoch": 2.67, "learning_rate": 5.424033563080612e-06, "loss": 0.6428, "step": 178500 },
    { "epoch": 2.68, "learning_rate": 5.299170911996804e-06, "loss": 0.6349, "step": 179000 },
    { "epoch": 2.69, "learning_rate": 5.174308260912996e-06, "loss": 0.6387, "step": 179500 },
    { "epoch": 2.7, "learning_rate": 5.0494456098291886e-06, "loss": 0.6405, "step": 180000 },
    { "epoch": 2.7, "learning_rate": 4.92458295874538e-06, "loss": 0.6471, "step": 180500 },
    { "epoch": 2.71, "learning_rate": 4.799720307661573e-06, "loss": 0.644, "step": 181000 },
    { "epoch": 2.72, "learning_rate": 4.674857656577765e-06, "loss": 0.6441, "step": 181500 },
    { "epoch": 2.73, "learning_rate": 4.549995005493957e-06, "loss": 0.6438, "step": 182000 },
    { "epoch": 2.73, "learning_rate": 4.425132354410149e-06, "loss": 0.6464, "step": 182500 },
    { "epoch": 2.74, "learning_rate": 4.3002697033263415e-06, "loss": 0.6328, "step": 183000 },
    { "epoch": 2.75, "learning_rate": 4.175407052242533e-06, "loss": 0.6373, "step": 183500 },
    { "epoch": 2.76, "learning_rate": 4.0505444011587255e-06, "loss": 0.6333, "step": 184000 },
    { "epoch": 2.76, "learning_rate": 3.925681750074918e-06, "loss": 0.6367, "step": 184500 },
    { "epoch": 2.77, "learning_rate": 3.8008190989911096e-06, "loss": 0.635, "step": 185000 },
    { "epoch": 2.78, "learning_rate": 3.675956447907302e-06, "loss": 0.6364, "step": 185500 },
    { "epoch": 2.79, "learning_rate": 3.5510937968234945e-06, "loss": 0.6384, "step": 186000 },
    { "epoch": 2.79, "learning_rate": 3.426231145739686e-06, "loss": 0.626, "step": 186500 },
    { "epoch": 2.8, "learning_rate": 3.3013684946558785e-06, "loss": 0.6335, "step": 187000 },
    { "epoch": 2.81, "learning_rate": 3.176505843572071e-06, "loss": 0.6423, "step": 187500 },
    { "epoch": 2.82, "learning_rate": 3.051643192488263e-06, "loss": 0.6466, "step": 188000 },
    { "epoch": 2.82, "learning_rate": 2.926780541404455e-06, "loss": 0.6326, "step": 188500 },
    { "epoch": 2.83, "learning_rate": 2.8019178903206474e-06, "loss": 0.6382, "step": 189000 },
    { "epoch": 2.84, "learning_rate": 2.6770552392368394e-06, "loss": 0.638, "step": 189500 },
    { "epoch": 2.85, "learning_rate": 2.552192588153032e-06, "loss": 0.6279, "step": 190000 },
    { "epoch": 2.85, "learning_rate": 2.427329937069224e-06, "loss": 0.6362, "step": 190500 },
    { "epoch": 2.86, "learning_rate": 2.302467285985416e-06, "loss": 0.6331, "step": 191000 },
    { "epoch": 2.87, "learning_rate": 2.1776046349016083e-06, "loss": 0.6179, "step": 191500 },
    { "epoch": 2.88, "learning_rate": 2.0527419838178003e-06, "loss": 0.6424, "step": 192000 },
    { "epoch": 2.88, "learning_rate": 1.927879332733993e-06, "loss": 0.6323, "step": 192500 },
    { "epoch": 2.89, "learning_rate": 1.803016681650185e-06, "loss": 0.6359, "step": 193000 },
    { "epoch": 2.9, "learning_rate": 1.678154030566377e-06, "loss": 0.6377, "step": 193500 },
    { "epoch": 2.91, "learning_rate": 1.5532913794825693e-06, "loss": 0.6346, "step": 194000 },
    { "epoch": 2.91, "learning_rate": 1.4284287283987615e-06, "loss": 0.6344, "step": 194500 },
    { "epoch": 2.92, "learning_rate": 1.3035660773149535e-06, "loss": 0.627, "step": 195000 },
    { "epoch": 2.93, "learning_rate": 1.1787034262311457e-06, "loss": 0.6335, "step": 195500 },
    { "epoch": 2.94, "learning_rate": 1.053840775147338e-06, "loss": 0.6388, "step": 196000 },
    { "epoch": 2.94, "learning_rate": 9.289781240635301e-07, "loss": 0.632, "step": 196500 },
    { "epoch": 2.95, "learning_rate": 8.041154729797223e-07, "loss": 0.6351, "step": 197000 },
    { "epoch": 2.96, "learning_rate": 6.792528218959145e-07, "loss": 0.6242, "step": 197500 },
    { "epoch": 2.97, "learning_rate": 5.543901708121067e-07, "loss": 0.6258, "step": 198000 },
    { "epoch": 2.97, "learning_rate": 4.295275197282989e-07, "loss": 0.6375, "step": 198500 },
    { "epoch": 2.98, "learning_rate": 3.0466486864449107e-07, "loss": 0.6256, "step": 199000 },
    { "epoch": 2.99, "learning_rate": 1.7980221756068325e-07, "loss": 0.6334, "step": 199500 },
    { "epoch": 3.0, "learning_rate": 5.493956647687544e-08, "loss": 0.6239, "step": 200000 },
    { "epoch": 3.0, "step": 200220, "total_flos": 2.2322905274597376e+17, "train_loss": 0.8990719857052506, "train_runtime": 61304.5877, "train_samples_per_second": 32.66, "train_steps_per_second": 3.266 }
  ],
  "max_steps": 200220,
  "num_train_epochs": 3,
  "total_flos": 2.2322905274597376e+17,
  "trial_name": null,
  "trial_params": null
}