mt5-small-dequad-qg / eval /metric.short.sentence.sentence_answer.question.lmqg_qg_dequad.default.json
{"validation": {"Bleu_1": 0.10544146773688598, "Bleu_2": 0.044189756416117186, "Bleu_3": 0.0186157209181725, "Bleu_4": 8.449759678389553e-07, "METEOR": 0.10556723341356421, "ROUGE_L": 0.1027657793831715, "BERTScore": 0.791384597628501, "MoverScore": 0.5422777667353593}, "test": {"Bleu_1": 0.09817327766179028, "Bleu_2": 0.04156918501250146, "Bleu_3": 0.017462231512701203, "Bleu_4": 0.006385362709581332, "METEOR": 0.10766712948120857, "ROUGE_L": 0.09938958906802538, "BERTScore": 0.7852462579994436, "MoverScore": 0.5405059891065251}}