bart-large-squad-qg / eval /metric.short.sentence.sentence_answer.question.lmqg_qg_squad.default.json
{"validation": {"Bleu_1": 0.5590138903744388, "Bleu_2": 0.4071383839063611, "Bleu_3": 0.3185915354196247, "Bleu_4": 0.2566871916856556, "METEOR": 0.26024728712792694, "ROUGE_L": 0.5366163446591908, "BERTScore": 0.9070440422795641, "MoverScore": 0.6429972604175417}, "test": {"Bleu_1": 0.5510090641148966, "Bleu_2": 0.39523322706388087, "Bleu_3": 0.30242585411389983, "Bleu_4": 0.23697325381922738, "METEOR": 0.2517811869031218, "ROUGE_L": 0.523501717049712, "BERTScore": 0.9048749061409995, "MoverScore": 0.6287940306458042}}