{
"epoch": 4.444444444444445,
"eval_loss": 0.996656596660614,
"eval_runtime": 10.5528,
"eval_samples_per_second": 9.476,
"eval_steps_per_second": 9.476,
"total_flos": 2.953325949045965e+16,
"train_loss": 0.8754597247838974,
"train_runtime": 1354.7094,
"train_samples_per_second": 2.953,
"train_steps_per_second": 0.369
}
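
These fields look like the aggregate metrics that the Hugging Face Trainer writes out after training and evaluation (for example via trainer.save_metrics). As a minimal sketch, assuming the file above is saved as all_results.json (the filename and path are assumptions, not confirmed by this repo), the values can be loaded and summarized like this:

import json

# Assumed path: adjust to wherever the metrics file actually lives.
with open("all_results.json") as f:
    metrics = json.load(f)

# Pull out a few readable figures from the raw fields.
print(f"final train loss: {metrics['train_loss']:.3f}")
print(f"final eval loss:  {metrics['eval_loss']:.3f}")
print(f"train runtime:    {metrics['train_runtime'] / 60:.1f} min")
print(f"total FLOs:       {metrics['total_flos']:.3e}")

This only reads the file; it does not reproduce the run that generated it.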