{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 297351,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.991592427804178e-05, "loss": 3.9144, "step": 500 },
    { "epoch": 0.01, "learning_rate": 4.983184855608356e-05, "loss": 3.1636, "step": 1000 },
    { "epoch": 0.02, "learning_rate": 4.9747772834125326e-05, "loss": 2.8169, "step": 1500 },
    { "epoch": 0.02, "learning_rate": 4.966369711216711e-05, "loss": 2.6277, "step": 2000 },
    { "epoch": 0.03, "learning_rate": 4.957962139020888e-05, "loss": 2.454, "step": 2500 },
    { "epoch": 0.03, "learning_rate": 4.9495545668250656e-05, "loss": 2.3704, "step": 3000 },
    { "epoch": 0.04, "learning_rate": 4.941146994629243e-05, "loss": 2.2794, "step": 3500 },
    { "epoch": 0.04, "learning_rate": 4.9327394224334204e-05, "loss": 2.199, "step": 4000 },
    { "epoch": 0.05, "learning_rate": 4.9243318502375986e-05, "loss": 2.1311, "step": 4500 },
    { "epoch": 0.05, "learning_rate": 4.915924278041776e-05, "loss": 2.0793, "step": 5000 },
    { "epoch": 0.06, "learning_rate": 4.9075167058459534e-05, "loss": 2.0409, "step": 5500 },
    { "epoch": 0.06, "learning_rate": 4.899109133650131e-05, "loss": 1.9857, "step": 6000 },
    { "epoch": 0.07, "learning_rate": 4.890701561454308e-05, "loss": 1.9468, "step": 6500 },
    { "epoch": 0.07, "learning_rate": 4.8822939892584864e-05, "loss": 1.9238, "step": 7000 },
    { "epoch": 0.08, "learning_rate": 4.873886417062664e-05, "loss": 1.8897, "step": 7500 },
    { "epoch": 0.08, "learning_rate": 4.8654788448668406e-05, "loss": 1.8494, "step": 8000 },
    { "epoch": 0.09, "learning_rate": 4.857071272671019e-05, "loss": 1.8276, "step": 8500 },
    { "epoch": 0.09, "learning_rate": 4.848663700475196e-05, "loss": 1.8104, "step": 9000 },
    { "epoch": 0.1, "learning_rate": 4.840256128279374e-05, "loss": 1.8061, "step": 9500 },
    { "epoch": 0.1, "learning_rate": 4.831848556083551e-05, "loss": 1.7745, "step": 10000 },
    { "epoch": 0.11, "learning_rate": 4.8234409838877285e-05, "loss": 1.752, "step": 10500 },
    { "epoch": 0.11, "learning_rate": 4.8150334116919066e-05, "loss": 1.7366, "step": 11000 },
    { "epoch": 0.12, "learning_rate": 4.806625839496084e-05, "loss": 1.702, "step": 11500 },
    { "epoch": 0.12, "learning_rate": 4.7982182673002615e-05, "loss": 1.7003, "step": 12000 },
    { "epoch": 0.13, "learning_rate": 4.789810695104439e-05, "loss": 1.6979, "step": 12500 },
    { "epoch": 0.13, "learning_rate": 4.7814031229086164e-05, "loss": 1.6692, "step": 13000 },
    { "epoch": 0.14, "learning_rate": 4.7729955507127945e-05, "loss": 1.6541, "step": 13500 },
    { "epoch": 0.14, "learning_rate": 4.764587978516972e-05, "loss": 1.6352, "step": 14000 },
    { "epoch": 0.15, "learning_rate": 4.7561804063211494e-05, "loss": 1.6232, "step": 14500 },
    { "epoch": 0.15, "learning_rate": 4.747772834125327e-05, "loss": 1.6187, "step": 15000 },
    { "epoch": 0.16, "learning_rate": 4.739365261929504e-05, "loss": 1.5967, "step": 15500 },
    { "epoch": 0.16, "learning_rate": 4.7309576897336824e-05, "loss": 1.6008, "step": 16000 },
    { "epoch": 0.17, "learning_rate": 4.722550117537859e-05, "loss": 1.5823, "step": 16500 },
    { "epoch": 0.17, "learning_rate": 4.7141425453420366e-05, "loss": 1.5706, "step": 17000 },
    { "epoch": 0.18, "learning_rate": 4.705734973146215e-05, "loss": 1.5634, "step": 17500 },
    { "epoch": 0.18, "learning_rate": 4.697327400950392e-05, "loss": 1.5677, "step": 18000 },
    { "epoch": 0.19, "learning_rate": 4.6889198287545696e-05, "loss": 1.5298, "step": 18500 },
    { "epoch": 0.19, "learning_rate": 4.680512256558747e-05, "loss": 1.5181, "step": 19000 },
    { "epoch": 0.2, "learning_rate": 4.6721046843629245e-05, "loss": 1.567, "step": 19500 },
    { "epoch": 0.2, "learning_rate": 4.6636971121671026e-05, "loss": 1.5376, "step": 20000 },
    { "epoch": 0.21, "learning_rate": 4.65528953997128e-05, "loss": 1.5011, "step": 20500 },
    { "epoch": 0.21, "learning_rate": 4.6468819677754575e-05, "loss": 1.525, "step": 21000 },
    { "epoch": 0.22, "learning_rate": 4.638474395579635e-05, "loss": 1.4911, "step": 21500 },
    { "epoch": 0.22, "learning_rate": 4.6300668233838124e-05, "loss": 1.4962, "step": 22000 },
    { "epoch": 0.23, "learning_rate": 4.6216592511879905e-05, "loss": 1.4807, "step": 22500 },
    { "epoch": 0.23, "learning_rate": 4.613251678992168e-05, "loss": 1.4768, "step": 23000 },
    { "epoch": 0.24, "learning_rate": 4.6048441067963454e-05, "loss": 1.4802, "step": 23500 },
    { "epoch": 0.24, "learning_rate": 4.596436534600523e-05, "loss": 1.4715, "step": 24000 },
    { "epoch": 0.25, "learning_rate": 4.5880289624047e-05, "loss": 1.4622, "step": 24500 },
    { "epoch": 0.25, "learning_rate": 4.5796213902088784e-05, "loss": 1.471, "step": 25000 },
    { "epoch": 0.26, "learning_rate": 4.571213818013055e-05, "loss": 1.4274, "step": 25500 },
    { "epoch": 0.26, "learning_rate": 4.562806245817233e-05, "loss": 1.4481, "step": 26000 },
    { "epoch": 0.27, "learning_rate": 4.554398673621411e-05, "loss": 1.4226, "step": 26500 },
    { "epoch": 0.27, "learning_rate": 4.545991101425588e-05, "loss": 1.4467, "step": 27000 },
    { "epoch": 0.28, "learning_rate": 4.5375835292297656e-05, "loss": 1.4303, "step": 27500 },
    { "epoch": 0.28, "learning_rate": 4.529175957033943e-05, "loss": 1.4354, "step": 28000 },
    { "epoch": 0.29, "learning_rate": 4.520768384838121e-05, "loss": 1.4073, "step": 28500 },
    { "epoch": 0.29, "learning_rate": 4.5123608126422986e-05, "loss": 1.4068, "step": 29000 },
    { "epoch": 0.3, "learning_rate": 4.503953240446476e-05, "loss": 1.4203, "step": 29500 },
    { "epoch": 0.3, "learning_rate": 4.4955456682506534e-05, "loss": 1.4178, "step": 30000 },
    { "epoch": 0.31, "learning_rate": 4.487138096054831e-05, "loss": 1.4176, "step": 30500 },
    { "epoch": 0.31, "learning_rate": 4.478730523859009e-05, "loss": 1.3696, "step": 31000 },
    { "epoch": 0.32, "learning_rate": 4.4703229516631864e-05, "loss": 1.3805, "step": 31500 },
    { "epoch": 0.32, "learning_rate": 4.461915379467363e-05, "loss": 1.3788, "step": 32000 },
    { "epoch": 0.33, "learning_rate": 4.453507807271541e-05, "loss": 1.3756, "step": 32500 },
    { "epoch": 0.33, "learning_rate": 4.445100235075719e-05, "loss": 1.3764, "step": 33000 },
    { "epoch": 0.34, "learning_rate": 4.436692662879897e-05, "loss": 1.387, "step": 33500 },
    { "epoch": 0.34, "learning_rate": 4.4282850906840737e-05, "loss": 1.3674, "step": 34000 },
    { "epoch": 0.35, "learning_rate": 4.419877518488251e-05, "loss": 1.3703, "step": 34500 },
    { "epoch": 0.35, "learning_rate": 4.411469946292429e-05, "loss": 1.3582, "step": 35000 },
    { "epoch": 0.36, "learning_rate": 4.4030623740966067e-05, "loss": 1.3635, "step": 35500 },
    { "epoch": 0.36, "learning_rate": 4.394654801900784e-05, "loss": 1.3379, "step": 36000 },
    { "epoch": 0.37, "learning_rate": 4.3862472297049615e-05, "loss": 1.348, "step": 36500 },
    { "epoch": 0.37, "learning_rate": 4.377839657509139e-05, "loss": 1.3515, "step": 37000 },
    { "epoch": 0.38, "learning_rate": 4.369432085313317e-05, "loss": 1.3611, "step": 37500 },
    { "epoch": 0.38, "learning_rate": 4.3610245131174945e-05, "loss": 1.349, "step": 38000 },
    { "epoch": 0.39, "learning_rate": 4.352616940921672e-05, "loss": 1.3223, "step": 38500 },
    { "epoch": 0.39, "learning_rate": 4.3442093687258494e-05, "loss": 1.3375, "step": 39000 },
    { "epoch": 0.4, "learning_rate": 4.335801796530027e-05, "loss": 1.3312, "step": 39500 },
    { "epoch": 0.4, "learning_rate": 4.327394224334205e-05, "loss": 1.3229, "step": 40000 },
    { "epoch": 0.41, "learning_rate": 4.318986652138382e-05, "loss": 1.3212, "step": 40500 },
    { "epoch": 0.41, "learning_rate": 4.31057907994256e-05, "loss": 1.3232, "step": 41000 },
    { "epoch": 0.42, "learning_rate": 4.302171507746737e-05, "loss": 1.3423, "step": 41500 },
    { "epoch": 0.42, "learning_rate": 4.293763935550915e-05, "loss": 1.3232, "step": 42000 },
    { "epoch": 0.43, "learning_rate": 4.285356363355092e-05, "loss": 1.2953, "step": 42500 },
    { "epoch": 0.43, "learning_rate": 4.2769487911592696e-05, "loss": 1.3158, "step": 43000 },
    { "epoch": 0.44, "learning_rate": 4.268541218963448e-05, "loss": 1.3102, "step": 43500 },
    { "epoch": 0.44, "learning_rate": 4.260133646767625e-05, "loss": 1.3198, "step": 44000 },
    { "epoch": 0.45, "learning_rate": 4.2517260745718026e-05, "loss": 1.2985, "step": 44500 },
    { "epoch": 0.45, "learning_rate": 4.24331850237598e-05, "loss": 1.3001, "step": 45000 },
    { "epoch": 0.46, "learning_rate": 4.2349109301801575e-05, "loss": 1.2926, "step": 45500 },
    { "epoch": 0.46, "learning_rate": 4.226503357984335e-05, "loss": 1.2729, "step": 46000 },
    { "epoch": 0.47, "learning_rate": 4.218095785788513e-05, "loss": 1.2969, "step": 46500 },
    { "epoch": 0.47, "learning_rate": 4.2096882135926905e-05, "loss": 1.2919, "step": 47000 },
    { "epoch": 0.48, "learning_rate": 4.201280641396868e-05, "loss": 1.2952, "step": 47500 },
    { "epoch": 0.48, "learning_rate": 4.1928730692010454e-05, "loss": 1.2857, "step": 48000 },
    { "epoch": 0.49, "learning_rate": 4.184465497005223e-05, "loss": 1.2859, "step": 48500 },
    { "epoch": 0.49, "learning_rate": 4.176057924809401e-05, "loss": 1.3007, "step": 49000 },
    { "epoch": 0.5, "learning_rate": 4.167650352613578e-05, "loss": 1.2659, "step": 49500 },
    { "epoch": 0.5, "learning_rate": 4.159242780417756e-05, "loss": 1.2771, "step": 50000 },
    { "epoch": 0.51, "learning_rate": 4.150835208221933e-05, "loss": 1.2843, "step": 50500 },
    { "epoch": 0.51, "learning_rate": 4.142427636026111e-05, "loss": 1.2892, "step": 51000 },
    { "epoch": 0.52, "learning_rate": 4.134020063830288e-05, "loss": 1.2646, "step": 51500 },
    { "epoch": 0.52, "learning_rate": 4.1256124916344656e-05, "loss": 1.2557, "step": 52000 },
    { "epoch": 0.53, "learning_rate": 4.117204919438644e-05, "loss": 1.2601, "step": 52500 },
    { "epoch": 0.53, "learning_rate": 4.108797347242821e-05, "loss": 1.2477, "step": 53000 },
    { "epoch": 0.54, "learning_rate": 4.1003897750469986e-05, "loss": 1.2678, "step": 53500 },
    { "epoch": 0.54, "learning_rate": 4.091982202851176e-05, "loss": 1.2583, "step": 54000 },
    { "epoch": 0.55, "learning_rate": 4.0835746306553535e-05, "loss": 1.2588, "step": 54500 },
    { "epoch": 0.55, "learning_rate": 4.0751670584595316e-05, "loss": 1.254, "step": 55000 },
    { "epoch": 0.56, "learning_rate": 4.066759486263709e-05, "loss": 1.2323, "step": 55500 },
    { "epoch": 0.56, "learning_rate": 4.058351914067886e-05, "loss": 1.2479, "step": 56000 },
    { "epoch": 0.57, "learning_rate": 4.049944341872064e-05, "loss": 1.229, "step": 56500 },
    { "epoch": 0.58, "learning_rate": 4.0415367696762413e-05, "loss": 1.2506, "step": 57000 },
    { "epoch": 0.58, "learning_rate": 4.0331291974804195e-05, "loss": 1.2486, "step": 57500 },
    { "epoch": 0.59, "learning_rate": 4.024721625284596e-05, "loss": 1.2388, "step": 58000 },
    { "epoch": 0.59, "learning_rate": 4.0163140530887737e-05, "loss": 1.2589, "step": 58500 },
    { "epoch": 0.6, "learning_rate": 4.007906480892952e-05, "loss": 1.2264, "step": 59000 },
    { "epoch": 0.6, "learning_rate": 3.999498908697129e-05, "loss": 1.2464, "step": 59500 },
    { "epoch": 0.61, "learning_rate": 3.9910913365013067e-05, "loss": 1.227, "step": 60000 },
    { "epoch": 0.61, "learning_rate": 3.982683764305484e-05, "loss": 1.2497, "step": 60500 },
    { "epoch": 0.62, "learning_rate": 3.9742761921096615e-05, "loss": 1.2341, "step": 61000 },
    { "epoch": 0.62, "learning_rate": 3.9658686199138397e-05, "loss": 1.2376, "step": 61500 },
    { "epoch": 0.63, "learning_rate": 3.957461047718017e-05, "loss": 1.2248, "step": 62000 },
    { "epoch": 0.63, "learning_rate": 3.9490534755221945e-05, "loss": 1.2208, "step": 62500 },
    { "epoch": 0.64, "learning_rate": 3.940645903326372e-05, "loss": 1.195, "step": 63000 },
    { "epoch": 0.64, "learning_rate": 3.9322383311305494e-05, "loss": 1.2058, "step": 63500 },
    { "epoch": 0.65, "learning_rate": 3.9238307589347275e-05, "loss": 1.2305, "step": 64000 },
    { "epoch": 0.65, "learning_rate": 3.915423186738904e-05, "loss": 1.2375, "step": 64500 },
    { "epoch": 0.66, "learning_rate": 3.9070156145430824e-05, "loss": 1.2202, "step": 65000 },
    { "epoch": 0.66, "learning_rate": 3.89860804234726e-05, "loss": 1.2081, "step": 65500 },
    { "epoch": 0.67, "learning_rate": 3.890200470151437e-05, "loss": 1.204, "step": 66000 },
    { "epoch": 0.67, "learning_rate": 3.881792897955615e-05, "loss": 1.2004, "step": 66500 },
    { "epoch": 0.68, "learning_rate": 3.873385325759792e-05, "loss": 1.2039, "step": 67000 },
    { "epoch": 0.68, "learning_rate": 3.86497775356397e-05, "loss": 1.2013, "step": 67500 },
    { "epoch": 0.69, "learning_rate": 3.856570181368148e-05, "loss": 1.2143, "step": 68000 },
    { "epoch": 0.69, "learning_rate": 3.848162609172325e-05, "loss": 1.1883, "step": 68500 },
    { "epoch": 0.7, "learning_rate": 3.8397550369765026e-05, "loss": 1.1958, "step": 69000 },
    { "epoch": 0.7, "learning_rate": 3.83134746478068e-05, "loss": 1.2, "step": 69500 },
    { "epoch": 0.71, "learning_rate": 3.822939892584858e-05, "loss": 1.2016, "step": 70000 },
    { "epoch": 0.71, "learning_rate": 3.8145323203890356e-05, "loss": 1.1906, "step": 70500 },
    { "epoch": 0.72, "learning_rate": 3.806124748193213e-05, "loss": 1.1845, "step": 71000 },
    { "epoch": 0.72, "learning_rate": 3.7977171759973905e-05, "loss": 1.1977, "step": 71500 },
    { "epoch": 0.73, "learning_rate": 3.789309603801568e-05, "loss": 1.179, "step": 72000 },
    { "epoch": 0.73, "learning_rate": 3.780902031605746e-05, "loss": 1.1981, "step": 72500 },
    { "epoch": 0.74, "learning_rate": 3.7724944594099235e-05, "loss": 1.1594, "step": 73000 },
    { "epoch": 0.74, "learning_rate": 3.7640868872141e-05, "loss": 1.1823, "step": 73500 },
    { "epoch": 0.75, "learning_rate": 3.7556793150182784e-05, "loss": 1.1694, "step": 74000 },
    { "epoch": 0.75, "learning_rate": 3.747271742822456e-05, "loss": 1.1821, "step": 74500 },
    { "epoch": 0.76, "learning_rate": 3.738864170626633e-05, "loss": 1.1805, "step": 75000 },
    { "epoch": 0.76, "learning_rate": 3.730456598430811e-05, "loss": 1.1822, "step": 75500 },
    { "epoch": 0.77, "learning_rate": 3.722049026234988e-05, "loss": 1.1828, "step": 76000 },
    { "epoch": 0.77, "learning_rate": 3.713641454039166e-05, "loss": 1.1851, "step": 76500 },
    { "epoch": 0.78, "learning_rate": 3.705233881843344e-05, "loss": 1.1643, "step": 77000 },
    { "epoch": 0.78, "learning_rate": 3.696826309647521e-05, "loss": 1.1627, "step": 77500 },
    { "epoch": 0.79, "learning_rate": 3.6884187374516986e-05, "loss": 1.168, "step": 78000 },
    { "epoch": 0.79, "learning_rate": 3.680011165255876e-05, "loss": 1.1761, "step": 78500 },
    { "epoch": 0.8, "learning_rate": 3.671603593060054e-05, "loss": 1.1732, "step": 79000 },
    { "epoch": 0.8, "learning_rate": 3.6631960208642316e-05, "loss": 1.1829, "step": 79500 },
    { "epoch": 0.81, "learning_rate": 3.6547884486684083e-05, "loss": 1.1495, "step": 80000 },
    { "epoch": 0.81, "learning_rate": 3.6463808764725865e-05, "loss": 1.171, "step": 80500 },
    { "epoch": 0.82, "learning_rate": 3.637973304276764e-05, "loss": 1.1545, "step": 81000 },
    { "epoch": 0.82, "learning_rate": 3.629565732080942e-05, "loss": 1.1563, "step": 81500 },
    { "epoch": 0.83, "learning_rate": 3.621158159885119e-05, "loss": 1.1794, "step": 82000 },
    { "epoch": 0.83, "learning_rate": 3.612750587689296e-05, "loss": 1.1549, "step": 82500 },
    { "epoch": 0.84, "learning_rate": 3.6043430154934743e-05, "loss": 1.1521, "step": 83000 },
    { "epoch": 0.84, "learning_rate": 3.595935443297652e-05, "loss": 1.141, "step": 83500 },
    { "epoch": 0.85, "learning_rate": 3.587527871101829e-05, "loss": 1.1472, "step": 84000 },
    { "epoch": 0.85, "learning_rate": 3.579120298906007e-05, "loss": 1.1647, "step": 84500 },
    { "epoch": 0.86, "learning_rate": 3.570712726710184e-05, "loss": 1.1492, "step": 85000 },
    { "epoch": 0.86, "learning_rate": 3.562305154514362e-05, "loss": 1.15, "step": 85500 },
    { "epoch": 0.87, "learning_rate": 3.55389758231854e-05, "loss": 1.1451, "step": 86000 },
    { "epoch": 0.87, "learning_rate": 3.545490010122717e-05, "loss": 1.1576, "step": 86500 },
    { "epoch": 0.88, "learning_rate": 3.5370824379268945e-05, "loss": 1.1446, "step": 87000 },
    { "epoch": 0.88, "learning_rate": 3.528674865731072e-05, "loss": 1.1436, "step": 87500 },
    { "epoch": 0.89, "learning_rate": 3.52026729353525e-05, "loss": 1.1339, "step": 88000 },
    { "epoch": 0.89, "learning_rate": 3.511859721339427e-05, "loss": 1.1506, "step": 88500 },
    { "epoch": 0.9, "learning_rate": 3.503452149143605e-05, "loss": 1.1423, "step": 89000 },
    { "epoch": 0.9, "learning_rate": 3.4950445769477824e-05, "loss": 1.1549, "step": 89500 },
    { "epoch": 0.91, "learning_rate": 3.48663700475196e-05, "loss": 1.1273, "step": 90000 },
    { "epoch": 0.91, "learning_rate": 3.478229432556137e-05, "loss": 1.1419, "step": 90500 },
    { "epoch": 0.92, "learning_rate": 3.469821860360315e-05, "loss": 1.1326, "step": 91000 },
    { "epoch": 0.92, "learning_rate": 3.461414288164493e-05, "loss": 1.1269, "step": 91500 },
    { "epoch": 0.93, "learning_rate": 3.45300671596867e-05, "loss": 1.1425, "step": 92000 },
    { "epoch": 0.93, "learning_rate": 3.444599143772848e-05, "loss": 1.1325, "step": 92500 },
    { "epoch": 0.94, "learning_rate": 3.436191571577025e-05, "loss": 1.1072, "step": 93000 },
    { "epoch": 0.94, "learning_rate": 3.4277839993812026e-05, "loss": 1.1251, "step": 93500 },
    { "epoch": 0.95, "learning_rate": 3.419376427185381e-05, "loss": 1.1313, "step": 94000 },
    { "epoch": 0.95, "learning_rate": 3.410968854989558e-05, "loss": 1.1222, "step": 94500 },
    { "epoch": 0.96, "learning_rate": 3.4025612827937356e-05, "loss": 1.1474, "step": 95000 },
    { "epoch": 0.96, "learning_rate": 3.394153710597913e-05, "loss": 1.1212, "step": 95500 },
    { "epoch": 0.97, "learning_rate": 3.3857461384020905e-05, "loss": 1.1181, "step": 96000 },
    { "epoch": 0.97, "learning_rate": 3.3773385662062686e-05, "loss": 1.1044, "step": 96500 },
    { "epoch": 0.98, "learning_rate": 3.368930994010446e-05, "loss": 1.0983, "step": 97000 },
    { "epoch": 0.98, "learning_rate": 3.360523421814623e-05, "loss": 1.1204, "step": 97500 },
    { "epoch": 0.99, "learning_rate": 3.352115849618801e-05, "loss": 1.1173, "step": 98000 },
    { "epoch": 0.99, "learning_rate": 3.3437082774229784e-05, "loss": 1.1307, "step": 98500 },
    { "epoch": 1.0, "learning_rate": 3.3353007052271565e-05, "loss": 1.1267, "step": 99000 },
    { "epoch": 1.0, "learning_rate": 3.326893133031333e-05, "loss": 1.0383, "step": 99500 },
    { "epoch": 1.01, "learning_rate": 3.318485560835511e-05, "loss": 1.0176, "step": 100000 },
    { "epoch": 1.01, "learning_rate": 3.310077988639689e-05, "loss": 1.0064, "step": 100500 },
    { "epoch": 1.02, "learning_rate": 3.301670416443866e-05, "loss": 1.0344, "step": 101000 },
    { "epoch": 1.02, "learning_rate": 3.293262844248044e-05, "loss": 1.0321, "step": 101500 },
    { "epoch": 1.03, "learning_rate": 3.284855272052221e-05, "loss": 1.0145, "step": 102000 },
    { "epoch": 1.03, "learning_rate": 3.2764476998563986e-05, "loss": 1.0237, "step": 102500 },
    { "epoch": 1.04, "learning_rate": 3.268040127660577e-05, "loss": 1.0119, "step": 103000 },
    { "epoch": 1.04, "learning_rate": 3.259632555464754e-05, "loss": 1.0162, "step": 103500 },
    { "epoch": 1.05, "learning_rate": 3.251224983268931e-05, "loss": 1.0114, "step": 104000 },
    { "epoch": 1.05, "learning_rate": 3.242817411073109e-05, "loss": 1.0048, "step": 104500 },
    { "epoch": 1.06, "learning_rate": 3.2344098388772865e-05, "loss": 1.031, "step": 105000 },
    { "epoch": 1.06, "learning_rate": 3.2260022666814646e-05, "loss": 1.0196, "step": 105500 },
    { "epoch": 1.07, "learning_rate": 3.2175946944856414e-05, "loss": 1.0178, "step": 106000 },
    { "epoch": 1.07, "learning_rate": 3.209187122289819e-05, "loss": 1.0326, "step": 106500 },
    { "epoch": 1.08, "learning_rate": 3.200779550093997e-05, "loss": 1.019, "step": 107000 },
    { "epoch": 1.08, "learning_rate": 3.1923719778981744e-05, "loss": 1.0128, "step": 107500 },
    { "epoch": 1.09, "learning_rate": 3.183964405702352e-05, "loss": 0.9983, "step": 108000 },
    { "epoch": 1.09, "learning_rate": 3.175556833506529e-05, "loss": 1.015, "step": 108500 },
    { "epoch": 1.1, "learning_rate": 3.167149261310707e-05, "loss": 1.0082, "step": 109000 },
    { "epoch": 1.1, "learning_rate": 3.158741689114885e-05, "loss": 1.0206, "step": 109500 },
    { "epoch": 1.11, "learning_rate": 3.150334116919062e-05, "loss": 1.0093, "step": 110000 },
    { "epoch": 1.11, "learning_rate": 3.14192654472324e-05, "loss": 1.0447, "step": 110500 },
    { "epoch": 1.12, "learning_rate": 3.133518972527417e-05, "loss": 1.0313, "step": 111000 },
    { "epoch": 1.12, "learning_rate": 3.1251114003315946e-05, "loss": 1.0167, "step": 111500 },
    { "epoch": 1.13, "learning_rate": 3.116703828135773e-05, "loss": 0.9993, "step": 112000 },
    { "epoch": 1.14, "learning_rate": 3.1082962559399494e-05, "loss": 1.0204, "step": 112500 },
    { "epoch": 1.14, "learning_rate": 3.0998886837441276e-05, "loss": 1.0144, "step": 113000 },
    { "epoch": 1.15, "learning_rate": 3.091481111548305e-05, "loss": 1.016, "step": 113500 },
    { "epoch": 1.15, "learning_rate": 3.0830735393524824e-05, "loss": 1.0075, "step": 114000 },
    { "epoch": 1.16, "learning_rate": 3.0746659671566606e-05, "loss": 1.0081, "step": 114500 },
    { "epoch": 1.16, "learning_rate": 3.066258394960837e-05, "loss": 1.0028, "step": 115000 },
    { "epoch": 1.17, "learning_rate": 3.0578508227650154e-05, "loss": 0.9926, "step": 115500 },
    { "epoch": 1.17, "learning_rate": 3.049443250569193e-05, "loss": 1.0033, "step": 116000 },
    { "epoch": 1.18, "learning_rate": 3.04103567837337e-05, "loss": 0.9876, "step": 116500 },
    { "epoch": 1.18, "learning_rate": 3.032628106177548e-05, "loss": 1.0082, "step": 117000 },
    { "epoch": 1.19, "learning_rate": 3.0242205339817252e-05, "loss": 1.01, "step": 117500 },
    { "epoch": 1.19, "learning_rate": 3.0158129617859033e-05, "loss": 0.9994, "step": 118000 },
    { "epoch": 1.2, "learning_rate": 3.0074053895900804e-05, "loss": 1.0031, "step": 118500 },
    { "epoch": 1.2, "learning_rate": 2.998997817394258e-05, "loss": 1.007, "step": 119000 },
    { "epoch": 1.21, "learning_rate": 2.9905902451984356e-05, "loss": 0.9771, "step": 119500 },
    { "epoch": 1.21, "learning_rate": 2.982182673002613e-05, "loss": 1.0073, "step": 120000 },
    { "epoch": 1.22, "learning_rate": 2.973775100806791e-05, "loss": 0.9912, "step": 120500 },
    { "epoch": 1.22, "learning_rate": 2.9653675286109683e-05, "loss": 0.9912, "step": 121000 },
    { "epoch": 1.23, "learning_rate": 2.9569599564151457e-05, "loss": 1.0106, "step": 121500 },
    { "epoch": 1.23, "learning_rate": 2.9485523842193235e-05, "loss": 1.0034, "step": 122000 },
    { "epoch": 1.24, "learning_rate": 2.940144812023501e-05, "loss": 1.0049, "step": 122500 },
    { "epoch": 1.24, "learning_rate": 2.9317372398276787e-05, "loss": 1.0011, "step": 123000 },
    { "epoch": 1.25, "learning_rate": 2.9233296676318562e-05, "loss": 1.0082, "step": 123500 },
    { "epoch": 1.25, "learning_rate": 2.9149220954360333e-05, "loss": 0.9963, "step": 124000 },
    { "epoch": 1.26, "learning_rate": 2.9065145232402114e-05, "loss": 1.0047, "step": 124500 },
    { "epoch": 1.26, "learning_rate": 2.898106951044389e-05, "loss": 0.9847, "step": 125000 },
    { "epoch": 1.27, "learning_rate": 2.8896993788485666e-05, "loss": 1.0177, "step": 125500 },
    { "epoch": 1.27, "learning_rate": 2.881291806652744e-05, "loss": 1.0099, "step": 126000 },
    { "epoch": 1.28, "learning_rate": 2.872884234456921e-05, "loss": 0.9906, "step": 126500 },
    { "epoch": 1.28, "learning_rate": 2.8644766622610993e-05, "loss": 0.9973, "step": 127000 },
    { "epoch": 1.29, "learning_rate": 2.8560690900652764e-05, "loss": 1.0014, "step": 127500 },
    { "epoch": 1.29, "learning_rate": 2.8476615178694545e-05, "loss": 1.0058, "step": 128000 },
    { "epoch": 1.3, "learning_rate": 2.8392539456736316e-05, "loss": 0.9997, "step": 128500 },
    { "epoch": 1.3, "learning_rate": 2.830846373477809e-05, "loss": 0.9959, "step": 129000 },
    { "epoch": 1.31, "learning_rate": 2.8224388012819868e-05, "loss": 1.0129, "step": 129500 },
    { "epoch": 1.31, "learning_rate": 2.8140312290861643e-05, "loss": 1.0072, "step": 130000 },
    { "epoch": 1.32, "learning_rate": 2.805623656890342e-05, "loss": 0.9957, "step": 130500 },
    { "epoch": 1.32, "learning_rate": 2.7972160846945195e-05, "loss": 0.9912, "step": 131000 },
    { "epoch": 1.33, "learning_rate": 2.788808512498697e-05, "loss": 1.0023, "step": 131500 },
    { "epoch": 1.33, "learning_rate": 2.7804009403028747e-05, "loss": 0.9983, "step": 132000 },
    { "epoch": 1.34, "learning_rate": 2.771993368107052e-05, "loss": 0.9905, "step": 132500 },
    { "epoch": 1.34, "learning_rate": 2.7635857959112292e-05, "loss": 1.0008, "step": 133000 },
    { "epoch": 1.35, "learning_rate": 2.7551782237154074e-05, "loss": 0.9893, "step": 133500 },
    { "epoch": 1.35, "learning_rate": 2.7467706515195845e-05, "loss": 0.9737, "step": 134000 },
    { "epoch": 1.36, "learning_rate": 2.7383630793237626e-05, "loss": 1.0146, "step": 134500 },
    { "epoch": 1.36, "learning_rate": 2.7299555071279397e-05, "loss": 0.9829, "step": 135000 },
    { "epoch": 1.37, "learning_rate": 2.721547934932117e-05, "loss": 0.995, "step": 135500 },
    { "epoch": 1.37, "learning_rate": 2.713140362736295e-05, "loss": 1.0042, "step": 136000 },
    { "epoch": 1.38, "learning_rate": 2.7047327905404723e-05, "loss": 0.9867, "step": 136500 },
    { "epoch": 1.38, "learning_rate": 2.69632521834465e-05, "loss": 0.9773, "step": 137000 },
    { "epoch": 1.39, "learning_rate": 2.6879176461488276e-05, "loss": 0.9816, "step": 137500 },
    { "epoch": 1.39, "learning_rate": 2.679510073953005e-05, "loss": 0.9828, "step": 138000 },
    { "epoch": 1.4, "learning_rate": 2.6711025017571828e-05, "loss": 0.9802, "step": 138500 },
    { "epoch": 1.4, "learning_rate": 2.6626949295613602e-05, "loss": 0.9687, "step": 139000 },
    { "epoch": 1.41, "learning_rate": 2.654287357365538e-05, "loss": 0.9869, "step": 139500 },
    { "epoch": 1.41, "learning_rate": 2.6458797851697154e-05, "loss": 0.9651, "step": 140000 },
    { "epoch": 1.42, "learning_rate": 2.6374722129738925e-05, "loss": 0.976, "step": 140500 },
    { "epoch": 1.42, "learning_rate": 2.6290646407780707e-05, "loss": 0.9893, "step": 141000 },
    { "epoch": 1.43, "learning_rate": 2.6206570685822478e-05, "loss": 0.9758, "step": 141500 },
    { "epoch": 1.43, "learning_rate": 2.612249496386426e-05, "loss": 0.968, "step": 142000 },
    { "epoch": 1.44, "learning_rate": 2.603841924190603e-05, "loss": 0.9807, "step": 142500 },
    { "epoch": 1.44, "learning_rate": 2.5954343519947804e-05, "loss": 0.9831, "step": 143000 },
    { "epoch": 1.45, "learning_rate": 2.5870267797989582e-05, "loss": 0.9763, "step": 143500 },
    { "epoch": 1.45, "learning_rate": 2.5786192076031356e-05, "loss": 0.9809, "step": 144000 },
    { "epoch": 1.46, "learning_rate": 2.5702116354073134e-05, "loss": 0.9712, "step": 144500 },
    { "epoch": 1.46, "learning_rate": 2.561804063211491e-05, "loss": 0.958, "step": 145000 },
    { "epoch": 1.47, "learning_rate": 2.5533964910156683e-05, "loss": 0.9619, "step": 145500 },
    { "epoch": 1.47, "learning_rate": 2.544988918819846e-05, "loss": 0.9643, "step": 146000 },
    { "epoch": 1.48, "learning_rate": 2.5365813466240235e-05, "loss": 0.9764, "step": 146500 },
    { "epoch": 1.48, "learning_rate": 2.5281737744282013e-05, "loss": 0.9618, "step": 147000 },
    { "epoch": 1.49, "learning_rate": 2.5197662022323787e-05, "loss": 0.9908, "step": 147500 },
    { "epoch": 1.49, "learning_rate": 2.5113586300365562e-05, "loss": 0.9722, "step": 148000 },
    { "epoch": 1.5, "learning_rate": 2.502951057840734e-05, "loss": 0.9676, "step": 148500 },
    { "epoch": 1.5, "learning_rate": 2.4945434856449114e-05, "loss": 0.982, "step": 149000 },
    { "epoch": 1.51, "learning_rate": 2.486135913449089e-05, "loss": 0.9675, "step": 149500 },
    { "epoch": 1.51, "learning_rate": 2.4777283412532666e-05, "loss": 0.9739, "step": 150000 },
    { "epoch": 1.52, "learning_rate": 2.469320769057444e-05, "loss": 0.9613, "step": 150500 },
    { "epoch": 1.52, "learning_rate": 2.460913196861622e-05, "loss": 0.9694, "step": 151000 },
    { "epoch": 1.53, "learning_rate": 2.452505624665799e-05, "loss": 0.9985, "step": 151500 },
    { "epoch": 1.53, "learning_rate": 2.4440980524699767e-05, "loss": 0.9712, "step": 152000 },
    { "epoch": 1.54, "learning_rate": 2.4356904802741542e-05, "loss": 0.9718, "step": 152500 },
    { "epoch": 1.54, "learning_rate": 2.427282908078332e-05, "loss": 0.9707, "step": 153000 },
    { "epoch": 1.55, "learning_rate": 2.4188753358825094e-05, "loss": 0.9634, "step": 153500 },
    { "epoch": 1.55, "learning_rate": 2.410467763686687e-05, "loss": 0.9503, "step": 154000 },
    { "epoch": 1.56, "learning_rate": 2.4020601914908643e-05, "loss": 0.9535, "step": 154500 },
    { "epoch": 1.56, "learning_rate": 2.393652619295042e-05, "loss": 0.9638, "step": 155000 },
    { "epoch": 1.57, "learning_rate": 2.3852450470992195e-05, "loss": 0.9639, "step": 155500 },
    { "epoch": 1.57, "learning_rate": 2.376837474903397e-05, "loss": 0.9634, "step": 156000 },
    { "epoch": 1.58, "learning_rate": 2.3684299027075747e-05, "loss": 0.9756, "step": 156500 },
    { "epoch": 1.58, "learning_rate": 2.360022330511752e-05, "loss": 0.966, "step": 157000 },
    { "epoch": 1.59, "learning_rate": 2.35161475831593e-05, "loss": 0.9571, "step": 157500 },
    { "epoch": 1.59, "learning_rate": 2.3432071861201074e-05, "loss": 0.9665, "step": 158000 },
    { "epoch": 1.6, "learning_rate": 2.3347996139242848e-05, "loss": 0.9718, "step": 158500 },
    { "epoch": 1.6, "learning_rate": 2.3263920417284623e-05, "loss": 0.9705, "step": 159000 },
    { "epoch": 1.61, "learning_rate": 2.31798446953264e-05, "loss": 0.9488, "step": 159500 },
    { "epoch": 1.61, "learning_rate": 2.3095768973368175e-05, "loss": 0.9643, "step": 160000 },
    { "epoch": 1.62, "learning_rate": 2.3011693251409953e-05, "loss": 0.9567, "step": 160500 },
    { "epoch": 1.62, "learning_rate": 2.2927617529451727e-05, "loss": 0.9575, "step": 161000 },
    { "epoch": 1.63, "learning_rate": 2.28435418074935e-05, "loss": 0.9638, "step": 161500 },
    { "epoch": 1.63, "learning_rate": 2.275946608553528e-05, "loss": 0.9563, "step": 162000 },
    { "epoch": 1.64, "learning_rate": 2.2675390363577054e-05, "loss": 0.9423, "step": 162500 },
    { "epoch": 1.64, "learning_rate": 2.259131464161883e-05, "loss": 0.9718, "step": 163000 },
    { "epoch": 1.65, "learning_rate": 2.2507238919660602e-05, "loss": 0.9625, "step": 163500 },
    { "epoch": 1.65, "learning_rate": 2.242316319770238e-05, "loss": 0.964, "step": 164000 },
    { "epoch": 1.66, "learning_rate": 2.2339087475744155e-05, "loss": 0.9546, "step": 164500 },
    { "epoch": 1.66, "learning_rate": 2.2255011753785932e-05, "loss": 0.9433, "step": 165000 },
    { "epoch": 1.67, "learning_rate": 2.2170936031827707e-05, "loss": 0.9735, "step": 165500 },
    { "epoch": 1.67, "learning_rate": 2.208686030986948e-05, "loss": 0.9649, "step": 166000 },
    { "epoch": 1.68, "learning_rate": 2.2002784587911256e-05, "loss": 0.9506, "step": 166500 },
    { "epoch": 1.68, "learning_rate": 2.1918708865953033e-05, "loss": 0.971, "step": 167000 },
    { "epoch": 1.69, "learning_rate": 2.1834633143994808e-05, "loss": 0.964, "step": 167500 },
    { "epoch": 1.69, "learning_rate": 2.1750557422036586e-05, "loss": 0.969, "step": 168000 },
    { "epoch": 1.7, "learning_rate": 2.166648170007836e-05, "loss": 0.956, "step": 168500 },
    { "epoch": 1.71, "learning_rate": 2.1582405978120134e-05, "loss": 0.9528, "step": 169000 },
    { "epoch": 1.71, "learning_rate": 2.1498330256161912e-05, "loss": 0.9476, "step": 169500 },
    { "epoch": 1.72, "learning_rate": 2.1414254534203687e-05, "loss": 0.9306, "step": 170000 },
    { "epoch": 1.72, "learning_rate": 2.133017881224546e-05, "loss": 0.9446, "step": 170500 },
    { "epoch": 1.73, "learning_rate": 2.1246103090287235e-05, "loss": 0.9569, "step": 171000 },
    { "epoch": 1.73, "learning_rate": 2.1162027368329013e-05, "loss": 0.9526, "step": 171500 },
    { "epoch": 1.74, "learning_rate": 2.1077951646370788e-05, "loss": 0.949, "step": 172000 },
    { "epoch": 1.74, "learning_rate": 2.0993875924412565e-05, "loss": 0.9213, "step": 172500 },
    { "epoch": 1.75, "learning_rate": 2.090980020245434e-05, "loss": 0.9591, "step": 173000 },
    { "epoch": 1.75, "learning_rate": 2.0825724480496114e-05, "loss": 0.9498, "step": 173500 },
    { "epoch": 1.76, "learning_rate": 2.0741648758537892e-05, "loss": 0.9568, "step": 174000 },
    { "epoch": 1.76, "learning_rate": 2.0657573036579666e-05, "loss": 0.9511, "step": 174500 },
    { "epoch": 1.77, "learning_rate": 2.0573497314621444e-05, "loss": 0.9462, "step": 175000 },
    { "epoch": 1.77, "learning_rate": 2.0489421592663215e-05, "loss": 0.9555, "step": 175500 },
    { "epoch": 1.78, "learning_rate": 2.0405345870704993e-05, "loss": 0.9366, "step": 176000 },
    { "epoch": 1.78, "learning_rate": 2.0321270148746767e-05, "loss": 0.9353, "step": 176500 },
    { "epoch": 1.79, "learning_rate": 2.0237194426788545e-05, "loss": 0.9483, "step": 177000 },
    { "epoch": 1.79, "learning_rate": 2.015311870483032e-05, "loss": 0.9341, "step": 177500 },
    { "epoch": 1.8, "learning_rate": 2.0069042982872094e-05, "loss": 0.9426, "step": 178000 },
    { "epoch": 1.8, "learning_rate": 1.998496726091387e-05, "loss": 0.9652, "step": 178500 },
    { "epoch": 1.81, "learning_rate": 1.9900891538955646e-05, "loss": 0.9547, "step": 179000 },
    { "epoch": 1.81, "learning_rate": 1.981681581699742e-05, "loss": 0.9366, "step": 179500 },
    { "epoch": 1.82, "learning_rate": 1.97327400950392e-05, "loss": 0.9481, "step": 180000 },
    { "epoch": 1.82, "learning_rate": 1.9648664373080973e-05, "loss": 0.9352, "step": 180500 },
    { "epoch": 1.83, "learning_rate": 1.9564588651122747e-05, "loss": 0.9436, "step": 181000 },
    { "epoch": 1.83, "learning_rate": 1.9480512929164525e-05, "loss": 0.9401, "step": 181500 },
    { "epoch": 1.84, "learning_rate": 1.93964372072063e-05, "loss": 0.9378, "step": 182000 },
    { "epoch": 1.84, "learning_rate": 1.9312361485248077e-05, "loss": 0.9446, "step": 182500 },
    { "epoch": 1.85, "learning_rate": 1.9228285763289848e-05, "loss": 0.9623, "step": 183000 },
    { "epoch": 1.85, "learning_rate": 1.9144210041331626e-05, "loss": 0.9411, "step": 183500 },
    { "epoch": 1.86, "learning_rate": 1.90601343193734e-05, "loss": 0.9461, "step": 184000 },
    { "epoch": 1.86, "learning_rate": 1.8976058597415178e-05, "loss": 0.9508, "step": 184500 },
    { "epoch": 1.87, "learning_rate": 1.8891982875456953e-05, "loss": 0.9324, "step": 185000 },
    { "epoch": 1.87, "learning_rate": 1.8807907153498727e-05, "loss": 0.9295, "step": 185500 },
    { "epoch": 1.88, "learning_rate": 1.8723831431540505e-05, "loss": 0.9266, "step": 186000 },
    { "epoch": 1.88, "learning_rate": 1.863975570958228e-05, "loss": 0.9302, "step": 186500 },
    { "epoch": 1.89, "learning_rate": 1.8555679987624057e-05, "loss": 0.9407, "step": 187000 },
    { "epoch": 1.89, "learning_rate": 1.8471604265665828e-05, "loss": 0.9435, "step": 187500 },
    { "epoch": 1.9, "learning_rate": 1.8387528543707606e-05, "loss": 0.9212, "step": 188000 },
    { "epoch": 1.9, "learning_rate": 1.830345282174938e-05, "loss": 0.9373, "step": 188500 },
    { "epoch": 1.91, "learning_rate": 1.8219377099791158e-05, "loss": 0.9387, "step": 189000 },
    { "epoch": 1.91, "learning_rate": 1.8135301377832932e-05, "loss": 0.9207, "step": 189500 },
    { "epoch": 1.92, "learning_rate": 1.8051225655874707e-05, "loss": 0.933, "step": 190000 },
    { "epoch": 1.92, "learning_rate": 1.796714993391648e-05, "loss": 0.9255, "step": 190500 },
    { "epoch": 1.93, "learning_rate": 1.788307421195826e-05, "loss": 0.9277, "step": 191000 },
    { "epoch": 1.93, "learning_rate": 1.7798998490000033e-05, "loss": 0.9305, "step": 191500 },
    { "epoch": 1.94, "learning_rate": 1.771492276804181e-05, "loss": 0.9268, "step": 192000 },
    { "epoch": 1.94, "learning_rate": 1.7630847046083586e-05, "loss": 0.92, "step": 192500 },
    { "epoch": 1.95, "learning_rate": 1.754677132412536e-05, "loss": 0.9142, "step": 193000 },
    { "epoch": 1.95, "learning_rate": 1.7462695602167138e-05, "loss": 0.9363, "step": 193500 },
    { "epoch": 1.96, "learning_rate": 1.7378619880208912e-05, "loss": 0.9304, "step": 194000 },
    { "epoch": 1.96, "learning_rate": 1.729454415825069e-05, "loss": 0.9197, "step": 194500 },
    { "epoch": 1.97, "learning_rate": 1.721046843629246e-05, "loss": 0.9351, "step": 195000 },
    { "epoch": 1.97, "learning_rate": 1.712639271433424e-05, "loss": 0.9127, "step": 195500 },
    { "epoch": 1.98, "learning_rate": 1.7042316992376013e-05, "loss": 0.9333, "step": 196000 },
    { "epoch": 1.98, "learning_rate": 1.695824127041779e-05, "loss": 0.9244, "step": 196500 },
    { "epoch": 1.99, "learning_rate": 1.6874165548459565e-05, "loss": 0.9279, "step": 197000 },
    { "epoch": 1.99, "learning_rate": 1.679008982650134e-05, "loss": 0.9337, "step": 197500 },
    { "epoch": 2.0, "learning_rate": 1.6706014104543118e-05, "loss": 0.9175, "step": 198000 },
    { "epoch": 2.0, "learning_rate": 1.6621938382584892e-05, "loss": 0.8634, "step": 198500 },
    { "epoch": 2.01, "learning_rate": 1.653786266062667e-05, "loss": 0.8111, "step": 199000 },
    { "epoch": 2.01, "learning_rate": 1.645378693866844e-05, "loss": 0.8328, "step": 199500 },
    { "epoch": 2.02, "learning_rate": 1.636971121671022e-05, "loss": 0.8205, "step": 200000 },
    { "epoch": 2.02, "learning_rate": 1.6285635494751993e-05, "loss": 0.8165, "step": 200500 },
    { "epoch": 2.03, "learning_rate": 1.620155977279377e-05, "loss": 0.8228, "step": 201000 },
    { "epoch": 2.03, "learning_rate": 1.6117484050835545e-05, "loss": 0.8189, "step": 201500 },
    { "epoch": 2.04, "learning_rate": 1.603340832887732e-05, "loss": 0.8244, "step": 202000 },
    { "epoch": 2.04, "learning_rate": 1.5949332606919094e-05, "loss": 0.8259, "step": 202500 },
    { "epoch": 2.05, "learning_rate": 1.5865256884960872e-05, "loss": 0.8159, "step": 203000 },
    { "epoch": 2.05, "learning_rate": 1.5781181163002646e-05, "loss": 0.8117, "step": 203500 },
    { "epoch": 2.06, "learning_rate": 1.5697105441044424e-05, "loss": 0.8292, "step": 204000 },
    { "epoch": 2.06, "learning_rate": 1.56130297190862e-05, "loss": 0.8213, "step": 204500 },
    { "epoch": 2.07, "learning_rate": 1.5528953997127973e-05, "loss": 0.8251, "step": 205000 },
    { "epoch": 2.07, "learning_rate": 1.544487827516975e-05, "loss": 0.8265, "step": 205500 },
    { "epoch": 2.08, "learning_rate": 1.5360802553211525e-05, "loss": 0.8308, "step": 206000 },
    { "epoch": 2.08, "learning_rate": 1.5276726831253303e-05, "loss": 0.8286, "step": 206500 },
    { "epoch": 2.09, "learning_rate": 1.5192651109295076e-05, "loss": 0.8165, "step": 207000 },
    { "epoch": 2.09, "learning_rate": 1.5108575387336852e-05, "loss": 0.8444, "step": 207500 },
    { "epoch": 2.1, "learning_rate": 1.5024499665378628e-05, "loss": 0.8155, "step": 208000 },
    { "epoch": 2.1, "learning_rate": 1.4940423943420404e-05, "loss": 0.8039, "step": 208500 },
    { "epoch": 2.11, "learning_rate": 1.485634822146218e-05, "loss": 0.8101, "step": 209000 },
    { "epoch": 2.11, "learning_rate": 1.4772272499503953e-05, "loss": 0.8438, "step": 209500 },
    { "epoch": 2.12, "learning_rate": 1.4688196777545729e-05, "loss": 0.8105, "step": 210000 },
    { "epoch": 2.12, "learning_rate": 1.4604121055587505e-05, "loss": 0.812, "step": 210500 },
    { "epoch": 2.13, "learning_rate": 1.4520045333629281e-05, "loss": 0.8202, "step": 211000 },
    { "epoch": 2.13, "learning_rate": 1.4435969611671057e-05, "loss": 0.8117, "step": 211500 },
    { "epoch": 2.14, "learning_rate": 1.4351893889712832e-05, "loss": 0.8163, "step": 212000 },
    { "epoch": 2.14, "learning_rate": 1.4267818167754608e-05, "loss": 0.8194, "step": 212500 },
    { "epoch": 2.15, "learning_rate": 1.4183742445796384e-05, "loss": 0.8095, "step": 213000 },
    { "epoch": 2.15, "learning_rate": 1.409966672383816e-05, "loss": 0.832, "step": 213500 },
    { "epoch": 2.16, "learning_rate": 1.4015591001879933e-05, "loss": 0.8203, "step": 214000 },
    { "epoch": 2.16, "learning_rate": 1.3931515279921709e-05, "loss": 0.7988, "step": 214500 },
    { "epoch": 2.17, "learning_rate": 1.3847439557963485e-05, "loss": 0.8147, "step": 215000 },
    { "epoch": 2.17, "learning_rate": 1.376336383600526e-05, "loss": 0.8217, "step": 215500 },
    { "epoch": 2.18, "learning_rate": 1.3679288114047037e-05, "loss": 0.8108, "step": 216000 },
    { "epoch": 2.18, "learning_rate": 1.359521239208881e-05, "loss": 0.8084, "step": 216500 },
    { "epoch": 2.19, "learning_rate": 1.3511136670130586e-05, "loss": 0.8226, "step": 217000 },
    { "epoch": 2.19, "learning_rate": 1.3427060948172362e-05, "loss": 0.8125, "step": 217500 },
    { "epoch": 2.2, "learning_rate": 1.3342985226214138e-05, "loss": 0.8136, "step": 218000 },
    { "epoch": 2.2, "learning_rate": 1.3258909504255914e-05, "loss": 0.8401, "step": 218500 },
    { "epoch": 2.21, "learning_rate": 1.3174833782297688e-05, "loss": 0.8225, "step": 219000 },
    { "epoch": 2.21, "learning_rate": 1.3090758060339465e-05, "loss": 0.8261, "step": 219500 },
    { "epoch": 2.22, "learning_rate": 1.300668233838124e-05, "loss": 0.8142, "step": 220000 },
    { "epoch": 2.22, "learning_rate": 1.2922606616423017e-05, "loss": 0.8131, "step": 220500 },
    { "epoch": 2.23, "learning_rate": 1.2838530894464793e-05, "loss": 0.8105, "step": 221000 },
    { "epoch": 2.23, "learning_rate": 1.2754455172506566e-05, "loss": 0.8338, "step": 221500 },
    { "epoch": 2.24, "learning_rate": 1.2670379450548342e-05, "loss": 0.8085, "step": 222000 },
    { "epoch": 2.24, "learning_rate": 1.2586303728590118e-05, "loss": 0.8228, "step": 222500 },
    { "epoch": 2.25, "learning_rate": 1.2502228006631894e-05, "loss": 0.8052, "step": 223000 },
    { "epoch": 2.25, "learning_rate": 1.2418152284673668e-05, "loss": 0.7996, "step": 223500 },
    { "epoch": 2.26, "learning_rate": 1.2334076562715444e-05, "loss": 0.8099, "step": 224000 },
    { "epoch": 2.26, "learning_rate": 1.225000084075722e-05, "loss": 0.8091, "step": 224500 },
    { "epoch": 2.27, "learning_rate": 1.2165925118798997e-05, "loss": 0.8135, "step": 225000 },
    { "epoch": 2.28, "learning_rate": 1.2081849396840771e-05, "loss": 0.8194, "step": 225500 },
    { "epoch": 2.28, "learning_rate": 1.1997773674882547e-05, "loss": 0.8131, "step": 226000 },
    { "epoch": 2.29, "learning_rate": 1.1913697952924323e-05, "loss": 0.8227, "step": 226500 },
    { "epoch": 2.29, "learning_rate": 1.1829622230966098e-05, "loss": 0.8087, "step": 227000 },
    { "epoch": 2.3, "learning_rate": 1.1745546509007874e-05, "loss": 0.8066, "step": 227500 },
    { "epoch": 2.3, "learning_rate": 1.1661470787049648e-05, "loss": 0.824, "step": 228000 },
    { "epoch": 2.31, "learning_rate": 1.1577395065091424e-05, "loss": 0.814, "step": 228500 },
    { "epoch": 2.31, "learning_rate": 1.14933193431332e-05, "loss": 0.8112, "step": 229000 },
    { "epoch": 2.32, "learning_rate": 1.1409243621174975e-05, "loss": 0.8105, "step": 229500 },
    { "epoch": 2.32, "learning_rate": 1.132516789921675e-05, "loss": 0.8208, "step": 230000 },
    { "epoch": 2.33, "learning_rate": 1.1241092177258527e-05, "loss": 0.8106, "step": 230500 },
    { "epoch": 2.33, "learning_rate": 1.1157016455300303e-05, "loss": 0.8096, "step": 231000 },
    { "epoch": 2.34, "learning_rate": 1.1072940733342079e-05, "loss": 0.8113, "step": 231500 },
    { "epoch": 2.34, "learning_rate": 1.0988865011383853e-05, "loss": 0.8016, "step": 232000 },
    { "epoch": 2.35, "learning_rate": 1.090478928942563e-05, "loss": 0.8119, "step": 232500 },
    { "epoch": 2.35, "learning_rate": 1.0820713567467404e-05, "loss": 0.7987, "step": 233000 },
    { "epoch": 2.36, "learning_rate": 1.073663784550918e-05, "loss": 0.7981, "step": 233500 },
    { "epoch": 2.36, "learning_rate": 1.0652562123550954e-05, "loss": 0.8152, "step": 234000 },
    { "epoch": 2.37, "learning_rate": 1.056848640159273e-05, "loss": 0.8024, "step": 234500 },
    { "epoch": 2.37, "learning_rate": 1.0484410679634507e-05, "loss": 0.805, "step": 235000 },
    { "epoch": 2.38, "learning_rate": 1.0400334957676281e-05, "loss": 0.8004, "step": 235500 },
    { "epoch": 2.38, "learning_rate": 1.0316259235718057e-05, "loss": 0.8149, "step": 236000 },
    { "epoch": 2.39, "learning_rate": 1.0232183513759833e-05, "loss": 0.7909, "step": 236500 },
    { "epoch": 2.39, "learning_rate": 1.014810779180161e-05, "loss": 0.8186, "step": 237000 },
    { "epoch": 2.4, "learning_rate": 1.0064032069843386e-05, "loss": 0.8237, "step": 237500 },
    { "epoch": 2.4, "learning_rate": 9.97995634788516e-06, "loss": 0.803, "step": 238000 },
    { "epoch": 2.41, "learning_rate": 9.895880625926936e-06, "loss": 0.7964, "step": 238500 },
    { "epoch": 2.41, "learning_rate": 9.81180490396871e-06, "loss": 0.8161, "step": 239000 },
    { "epoch": 2.42, "learning_rate": 9.727729182010487e-06, "loss": 0.8002, "step": 239500 },
    { "epoch": 2.42, "learning_rate": 9.643653460052261e-06, "loss": 0.8098, "step": 240000 },
    { "epoch": 2.43, "learning_rate": 9.559577738094037e-06, "loss": 0.8066, "step": 240500 },
    { "epoch": 2.43, "learning_rate": 9.475502016135813e-06, "loss": 0.7983, "step": 241000 },
    { "epoch": 2.44, "learning_rate": 9.391426294177588e-06, "loss": 0.8024, "step": 241500 },
    { "epoch": 2.44, "learning_rate": 9.307350572219364e-06, "loss": 0.8141, "step": 242000 },
    { "epoch": 2.45, "learning_rate": 9.22327485026114e-06, "loss": 0.8175, "step": 242500 },
    { "epoch": 2.45, "learning_rate": 9.139199128302916e-06, "loss": 0.7908, "step": 243000 },
    { "epoch": 2.46, "learning_rate": 9.055123406344692e-06, "loss": 0.8036, "step": 243500 },
    { "epoch": 2.46, "learning_rate": 8.971047684386466e-06, "loss": 0.8256, "step": 244000 },
    { "epoch": 2.47, "learning_rate": 8.886971962428242e-06, "loss": 0.8206, "step": 244500 },
    { "epoch": 2.47, "learning_rate": 8.802896240470017e-06, "loss": 0.8007, "step": 245000 },
    { "epoch": 2.48, "learning_rate": 8.718820518511793e-06, "loss": 0.8025, "step": 245500 },
    { "epoch": 2.48, "learning_rate": 8.634744796553569e-06, "loss": 0.8014, "step": 246000 },
    { "epoch": 2.49, "learning_rate": 8.550669074595343e-06, "loss": 0.8016, "step": 246500 },
    { "epoch": 2.49, "learning_rate": 8.46659335263712e-06, "loss": 0.7916, "step": 247000 },
    { "epoch": 2.5, "learning_rate": 8.382517630678894e-06, "loss": 0.8014, "step": 247500 },
    { "epoch": 2.5, "learning_rate": 8.29844190872067e-06, "loss": 0.786, "step": 248000 },
    { "epoch": 2.51, "learning_rate": 8.214366186762446e-06, "loss": 0.8171, "step": 248500 },
    { "epoch": 2.51, "learning_rate": 8.130290464804222e-06, "loss": 0.7996, "step": 249000 },
    { "epoch": 2.52, "learning_rate": 8.046214742845998e-06, "loss": 0.7991, "step": 249500 },
    { "epoch": 2.52, "learning_rate": 7.962139020887773e-06, "loss": 0.7957, "step": 250000 },
    { "epoch": 2.53, "learning_rate": 7.878063298929549e-06, "loss": 0.791, "step": 250500 },
    { "epoch": 2.53, "learning_rate": 7.793987576971323e-06, "loss": 0.7988, "step": 251000 },
    { "epoch": 2.54, "learning_rate": 7.7099118550131e-06, "loss": 0.7948, "step": 251500 },
    { "epoch": 2.54, "learning_rate": 7.6258361330548755e-06, "loss": 0.8013, "step": 252000 },
    { "epoch": 2.55, "learning_rate": 7.54176041109665e-06, "loss": 0.7847, "step": 252500 },
    { "epoch": 2.55, "learning_rate": 7.457684689138427e-06, "loss": 0.8055, "step": 253000 },
    { "epoch": 2.56, "learning_rate": 7.373608967180201e-06, "loss": 0.7753, "step": 253500 },
    { "epoch": 2.56, "learning_rate": 7.289533245221977e-06, "loss": 0.7933, "step": 254000 },
    { "epoch": 2.57, "learning_rate": 7.205457523263752e-06, "loss": 0.8028, "step": 254500 },
    { "epoch": 2.57, "learning_rate": 7.121381801305528e-06, "loss": 0.7978, "step": 255000 },
    { "epoch": 2.58, "learning_rate": 7.037306079347304e-06, "loss": 0.7982, "step": 255500 },
    { "epoch": 2.58, "learning_rate": 6.953230357389079e-06, "loss": 0.7935, "step": 256000 },
    { "epoch": 2.59, "learning_rate": 6.869154635430855e-06, "loss": 0.8012, "step": 256500 },
    { "epoch": 2.59, "learning_rate": 6.78507891347263e-06, "loss": 0.803, "step": 257000 },
    { "epoch": 2.6, "learning_rate": 6.701003191514406e-06, "loss": 0.8042, "step": 257500 },
    { "epoch": 2.6, "learning_rate": 6.616927469556182e-06, "loss": 0.7998, "step": 258000 },
    { "epoch": 2.61, "learning_rate": 6.532851747597957e-06, "loss": 0.7931, "step": 258500 },
    { "epoch": 2.61, "learning_rate": 6.448776025639733e-06, "loss": 0.8042, "step": 259000 },
    { "epoch": 2.62, "learning_rate": 6.364700303681508e-06, "loss": 0.7897, "step": 259500 },
    { "epoch": 2.62, "learning_rate": 6.280624581723284e-06, "loss": 0.8136, "step": 260000 },
    { "epoch": 2.63, "learning_rate": 6.196548859765059e-06, "loss": 0.7773, "step": 260500 },
    { "epoch": 2.63, "learning_rate": 6.112473137806834e-06, "loss": 0.8014, "step": 261000 },
    { "epoch": 2.64, "learning_rate": 6.02839741584861e-06, "loss": 0.7907, "step": 261500 },
    { "epoch": 2.64, "learning_rate": 5.9443216938903864e-06, "loss": 0.7961, "step": 262000 },
    { "epoch": 2.65, "learning_rate": 5.860245971932162e-06,
"loss": 0.7919, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.776170249973937e-06, |
|
"loss": 0.782, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.692094528015712e-06, |
|
"loss": 0.7765, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.6080188060574874e-06, |
|
"loss": 0.7931, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.5239430840992635e-06, |
|
"loss": 0.7963, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.43986736214104e-06, |
|
"loss": 0.7836, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.355791640182815e-06, |
|
"loss": 0.7988, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.27171591822459e-06, |
|
"loss": 0.788, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.187640196266365e-06, |
|
"loss": 0.7878, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.103564474308141e-06, |
|
"loss": 0.7848, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.019488752349917e-06, |
|
"loss": 0.7936, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.935413030391693e-06, |
|
"loss": 0.7902, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.851337308433468e-06, |
|
"loss": 0.7948, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.767261586475243e-06, |
|
"loss": 0.7877, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.683185864517019e-06, |
|
"loss": 0.7791, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.599110142558794e-06, |
|
"loss": 0.7819, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.51503442060057e-06, |
|
"loss": 0.7844, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.430958698642346e-06, |
|
"loss": 0.7857, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.346882976684121e-06, |
|
"loss": 0.7866, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.2628072547258966e-06, |
|
"loss": 0.7896, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.178731532767672e-06, |
|
"loss": 0.7797, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.094655810809447e-06, |
|
"loss": 0.7926, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.010580088851223e-06, |
|
"loss": 0.8092, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.926504366892999e-06, |
|
"loss": 0.7916, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.8424286449347745e-06, |
|
"loss": 0.7875, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.7583529229765498e-06, |
|
"loss": 0.7937, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.674277201018325e-06, |
|
"loss": 0.7848, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.5902014790601007e-06, |
|
"loss": 0.7931, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.506125757101877e-06, |
|
"loss": 0.7971, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.422050035143652e-06, |
|
"loss": 0.78, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3379743131854277e-06, |
|
"loss": 0.7906, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.253898591227203e-06, |
|
"loss": 0.7806, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1698228692689782e-06, |
|
"loss": 0.7885, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.0857471473107543e-06, |
|
"loss": 0.7809, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0016714253525296e-06, |
|
"loss": 0.7809, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9175957033943053e-06, |
|
"loss": 0.775, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.833519981436081e-06, |
|
"loss": 0.7967, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.749444259477856e-06, |
|
"loss": 0.7885, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.665368537519632e-06, |
|
"loss": 0.7757, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5812928155614076e-06, |
|
"loss": 0.7923, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.497217093603183e-06, |
|
"loss": 0.7835, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.4131413716449585e-06, |
|
"loss": 0.7766, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.329065649686734e-06, |
|
"loss": 0.7609, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.2449899277285094e-06, |
|
"loss": 0.7703, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.160914205770285e-06, |
|
"loss": 0.7844, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0768384838120608e-06, |
|
"loss": 0.788, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.992762761853836e-06, |
|
"loss": 0.7852, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.9086870398956117e-06, |
|
"loss": 0.7854, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8246113179373872e-06, |
|
"loss": 0.7731, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7405355959791626e-06, |
|
"loss": 0.7993, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6564598740209383e-06, |
|
"loss": 0.7875, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5723841520627138e-06, |
|
"loss": 0.7614, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4883084301044894e-06, |
|
"loss": 0.7773, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.404232708146265e-06, |
|
"loss": 0.7785, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3201569861880404e-06, |
|
"loss": 0.7909, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.236081264229816e-06, |
|
"loss": 0.7869, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1520055422715917e-06, |
|
"loss": 0.7848, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.067929820313367e-06, |
|
"loss": 0.783, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.838540983551427e-07, |
|
"loss": 0.774, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.997783763969182e-07, |
|
"loss": 0.77, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.157026544386937e-07, |
|
"loss": 0.7865, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.316269324804693e-07, |
|
"loss": 0.7824, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.475512105222447e-07, |
|
"loss": 0.7706, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.634754885640203e-07, |
|
"loss": 0.779, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.793997666057959e-07, |
|
"loss": 0.7781, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.9532404464757143e-07, |
|
"loss": 0.7684, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.1124832268934695e-07, |
|
"loss": 0.7816, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.271726007311225e-07, |
|
"loss": 0.7818, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.4309687877289804e-07, |
|
"loss": 0.7767, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.9021156814673575e-08, |
|
"loss": 0.7807, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 297351, |
|
"total_flos": 3.1773810039894835e+17, |
|
"train_loss": 1.0556110556925662, |
|
"train_runtime": 79226.8485, |
|
"train_samples_per_second": 37.532, |
|
"train_steps_per_second": 3.753 |
|
} |
|
], |
|
"max_steps": 297351, |
|
"num_train_epochs": 3, |
|
"total_flos": 3.1773810039894835e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|