m2m100_418M_ibo_en_rel_news_ft / all_results.json
Commit 71641cb: add MT model
{
    "epoch": 3.0,
    "eval_bleu": 16.2144,
    "eval_gen_len": 23.4513,
    "eval_loss": 2.461557626724243,
    "eval_runtime": 310.7865,
    "eval_samples": 1500,
    "eval_samples_per_second": 4.826,
    "eval_steps_per_second": 0.483,
    "predict_bleu": 20.9356,
    "predict_gen_len": 22.9733,
    "predict_loss": 2.167288303375244,
    "predict_runtime": 317.1665,
    "predict_samples": 1500,
    "predict_samples_per_second": 4.729,
    "predict_steps_per_second": 0.473,
    "train_loss": 0.8962585285731725,
    "train_runtime": 620.72,
    "train_samples": 6998,
    "train_samples_per_second": 33.822,
    "train_steps_per_second": 3.383
}
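
For context, a minimal sketch of how a fine-tuned M2M100 checkpoint like this one can be loaded for Igbo-to-English translation with the `transformers` library. The Hub model ID `Davlan/m2m100_418M_ibo_en_rel_news_ft` is inferred from the repository path and should be treated as an assumption, as is the example sentence.

```python
# Minimal sketch: Igbo -> English translation with a fine-tuned M2M100 checkpoint.
# The model ID below is inferred from the repository path and is an assumption.
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

model_id = "Davlan/m2m100_418M_ibo_en_rel_news_ft"  # assumed Hub ID
tokenizer = M2M100Tokenizer.from_pretrained(model_id)
model = M2M100ForConditionalGeneration.from_pretrained(model_id)

# M2M100 requires the source language to be set on the tokenizer and the
# target language to be forced as the first generated token.
tokenizer.src_lang = "ig"  # Igbo
igbo_text = "Kedu ka ị mere?"  # illustrative input sentence
encoded = tokenizer(igbo_text, return_tensors="pt")
generated = model.generate(
    **encoded,
    forced_bos_token_id=tokenizer.get_lang_id("en"),  # force English output
    max_length=64,
)
print(tokenizer.batch_decode(generated, skip_special_tokens=True)[0])
```

The `eval_*` and `predict_*` entries above are the standard metrics written by the Hugging Face `Trainer` for the validation and test splits respectively; `eval_bleu` and `predict_bleu` are the BLEU scores reported after 3 training epochs.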