{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 40.0,
  "global_step": 31880,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.63,
      "learning_rate": 2.976e-05,
      "loss": 7.9128,
      "step": 500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9525812619502866e-05,
      "loss": 3.5609,
      "step": 1000
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.9048757170172085e-05,
      "loss": 3.3615,
      "step": 1500
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.857074569789675e-05,
      "loss": 2.8318,
      "step": 2000
    },
    {
      "epoch": 3.14,
      "learning_rate": 2.8093690248565966e-05,
      "loss": 1.7371,
      "step": 2500
    },
    {
      "epoch": 3.76,
      "learning_rate": 2.7615678776290633e-05,
      "loss": 1.3703,
      "step": 3000
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.71395793499044e-05,
      "loss": 1.1862,
      "step": 3500
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.6661567877629064e-05,
      "loss": 1.0873,
      "step": 4000
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.618451242829828e-05,
      "loss": 0.981,
      "step": 4500
    },
    {
      "epoch": 6.27,
      "learning_rate": 2.5707456978967497e-05,
      "loss": 0.9185,
      "step": 5000
    },
    {
      "epoch": 6.9,
      "learning_rate": 2.522944550669216e-05,
      "loss": 0.8622,
      "step": 5500
    },
    {
      "epoch": 7.53,
      "learning_rate": 2.4752390057361376e-05,
      "loss": 0.8181,
      "step": 6000
    },
    {
      "epoch": 8.16,
      "learning_rate": 2.4274378585086042e-05,
      "loss": 0.7863,
      "step": 6500
    },
    {
      "epoch": 8.78,
      "learning_rate": 2.3797323135755258e-05,
      "loss": 0.7409,
      "step": 7000
    },
    {
      "epoch": 9.41,
      "learning_rate": 2.3319311663479924e-05,
      "loss": 0.7223,
      "step": 7500
    },
    {
      "epoch": 10.04,
      "learning_rate": 2.284225621414914e-05,
      "loss": 0.7005,
      "step": 8000
    },
    {
      "epoch": 10.66,
      "learning_rate": 2.2365200764818355e-05,
      "loss": 0.6662,
      "step": 8500
    },
    {
      "epoch": 11.29,
      "learning_rate": 2.188718929254302e-05,
      "loss": 0.6458,
      "step": 9000
    },
    {
      "epoch": 11.92,
      "learning_rate": 2.1410133843212236e-05,
      "loss": 0.6246,
      "step": 9500
    },
    {
      "epoch": 12.55,
      "learning_rate": 2.093307839388145e-05,
      "loss": 0.5987,
      "step": 10000
    },
    {
      "epoch": 13.17,
      "learning_rate": 2.0455066921606118e-05,
      "loss": 0.5974,
      "step": 10500
    },
    {
      "epoch": 13.8,
      "learning_rate": 1.9978011472275333e-05,
      "loss": 0.5831,
      "step": 11000
    },
    {
      "epoch": 14.43,
      "learning_rate": 1.9500956022944552e-05,
      "loss": 0.5549,
      "step": 11500
    },
    {
      "epoch": 15.06,
      "learning_rate": 1.902294455066922e-05,
      "loss": 0.561,
      "step": 12000
    },
    {
      "epoch": 15.68,
      "learning_rate": 1.8545889101338434e-05,
      "loss": 0.5462,
      "step": 12500
    },
    {
      "epoch": 16.31,
      "learning_rate": 1.806883365200765e-05,
      "loss": 0.5491,
      "step": 13000
    },
    {
      "epoch": 16.94,
      "learning_rate": 1.7590822179732315e-05,
      "loss": 0.5594,
      "step": 13500
    },
    {
      "epoch": 17.57,
      "learning_rate": 1.7112810707456982e-05,
      "loss": 0.5565,
      "step": 14000
    },
    {
      "epoch": 18.19,
      "learning_rate": 1.6636711281070746e-05,
      "loss": 0.5745,
      "step": 14500
    },
    {
      "epoch": 18.82,
      "learning_rate": 1.6158699808795412e-05,
      "loss": 0.6096,
      "step": 15000
    },
    {
      "epoch": 19.45,
      "learning_rate": 1.568068833652008e-05,
      "loss": 0.67,
      "step": 15500
    },
    {
      "epoch": 20.08,
      "learning_rate": 1.5203632887189294e-05,
      "loss": 0.6566,
      "step": 16000
    },
    {
      "epoch": 20.7,
      "learning_rate": 1.4725621414913959e-05,
      "loss": 0.6627,
      "step": 16500
    },
    {
      "epoch": 21.33,
      "learning_rate": 1.4249521988527725e-05,
      "loss": 0.6647,
      "step": 17000
    },
    {
      "epoch": 21.96,
      "learning_rate": 1.377151051625239e-05,
      "loss": 0.9268,
      "step": 17500
    },
    {
      "epoch": 22.58,
      "learning_rate": 1.3293499043977056e-05,
      "loss": 0.9965,
      "step": 18000
    },
    {
      "epoch": 23.21,
      "learning_rate": 1.2816443594646271e-05,
      "loss": 0.9737,
      "step": 18500
    },
    {
      "epoch": 23.84,
      "learning_rate": 1.2339388145315486e-05,
      "loss": 1.4449,
      "step": 19000
    },
    {
      "epoch": 24.47,
      "learning_rate": 1.1861376673040153e-05,
      "loss": 2.0548,
      "step": 19500
    },
    {
      "epoch": 25.09,
      "learning_rate": 1.138432122370937e-05,
      "loss": 2.4726,
      "step": 20000
    },
    {
      "epoch": 25.72,
      "learning_rate": 1.0906309751434036e-05,
      "loss": 2.7061,
      "step": 20500
    },
    {
      "epoch": 26.35,
      "learning_rate": 1.0430210325047802e-05,
      "loss": 2.9211,
      "step": 21000
    },
    {
      "epoch": 26.98,
      "learning_rate": 9.952198852772467e-06,
      "loss": 2.9367,
      "step": 21500
    },
    {
      "epoch": 27.6,
      "learning_rate": 9.474187380497133e-06,
      "loss": 2.9015,
      "step": 22000
    },
    {
      "epoch": 28.23,
      "learning_rate": 8.997131931166348e-06,
      "loss": 2.9781,
      "step": 22500
    },
    {
      "epoch": 28.86,
      "learning_rate": 8.520076481835564e-06,
      "loss": 3.0789,
      "step": 23000
    },
    {
      "epoch": 29.49,
      "learning_rate": 8.042065009560229e-06,
      "loss": 3.0514,
      "step": 23500
    },
    {
      "epoch": 30.11,
      "learning_rate": 7.565009560229446e-06,
      "loss": 3.0248,
      "step": 24000
    },
    {
      "epoch": 30.74,
      "learning_rate": 7.087954110898662e-06,
      "loss": 2.9983,
      "step": 24500
    },
    {
      "epoch": 31.37,
      "learning_rate": 6.609942638623327e-06,
      "loss": 2.9716,
      "step": 25000
    },
    {
      "epoch": 31.99,
      "learning_rate": 6.1328871892925425e-06,
      "loss": 2.9549,
      "step": 25500
    },
    {
      "epoch": 32.62,
      "learning_rate": 5.6558317399617595e-06,
      "loss": 2.9551,
      "step": 26000
    },
    {
      "epoch": 33.25,
      "learning_rate": 5.177820267686425e-06,
      "loss": 2.9571,
      "step": 26500
    },
    {
      "epoch": 33.88,
      "learning_rate": 4.70076481835564e-06,
      "loss": 2.9589,
      "step": 27000
    },
    {
      "epoch": 34.5,
      "learning_rate": 4.2237093690248565e-06,
      "loss": 2.9573,
      "step": 27500
    },
    {
      "epoch": 35.13,
      "learning_rate": 3.7456978967495225e-06,
      "loss": 2.9556,
      "step": 28000
    },
    {
      "epoch": 35.76,
      "learning_rate": 3.268642447418738e-06,
      "loss": 2.9545,
      "step": 28500
    },
    {
      "epoch": 36.39,
      "learning_rate": 2.7906309751434034e-06,
      "loss": 2.9585,
      "step": 29000
    },
    {
      "epoch": 37.01,
      "learning_rate": 2.3135755258126195e-06,
      "loss": 2.9558,
      "step": 29500
    },
    {
      "epoch": 37.64,
      "learning_rate": 1.8355640535372848e-06,
      "loss": 2.9561,
      "step": 30000
    },
    {
      "epoch": 38.27,
      "learning_rate": 1.358508604206501e-06,
      "loss": 2.9597,
      "step": 30500
    },
    {
      "epoch": 38.9,
      "learning_rate": 8.81453154875717e-07,
      "loss": 2.957,
      "step": 31000
    },
    {
      "epoch": 39.52,
      "learning_rate": 4.0344168260038244e-07,
      "loss": 2.9569,
      "step": 31500
    },
    {
      "epoch": 40.0,
      "step": 31880,
      "total_flos": 2.3797340595067814e+19,
      "train_loss": 1.875457509039636,
      "train_runtime": 14726.9257,
      "train_samples_per_second": 34.625,
      "train_steps_per_second": 2.165
    }
  ],
  "max_steps": 31880,
  "num_train_epochs": 40,
  "total_flos": 2.3797340595067814e+19,
  "trial_name": null,
  "trial_params": null
}