mt5-base-dequad-qg/eval/metric.first.sentence.paragraph_answer.question.lmqg_qg_dequad.default.json
{"validation": {"Bleu_1": 0.12974191931846005, "Bleu_2": 0.05724293310417708, "Bleu_3": 0.02759076699158977, "Bleu_4": 0.012377278385198033, "METEOR": 0.15564940795444504, "ROUGE_L": 0.13002149496126614, "BERTScore": 0.8257608360032943, "MoverScore": 0.5714381008263734}, "test": {"Bleu_1": 0.108536892470405, "Bleu_2": 0.0460774951505116, "Bleu_3": 0.020565112444773048, "Bleu_4": 0.008725076003212685, "METEOR": 0.1365307725077819, "ROUGE_L": 0.11103265327794129, "BERTScore": 0.803881755792351, "MoverScore": 0.5572775810948901}}