t5-large-subjqa-vanilla-books-qg / eval /metric.first.sentence.sentence_answer.question.asahi417_qg_subjqa.books.json
{"validation": {"Bleu_1": 0.03665987780037, "Bleu_2": 0.006403637616287011, "Bleu_3": 3.660573001436189e-08, "Bleu_4": 8.888043422378835e-11, "METEOR": 0.028110432163448468, "ROUGE_L": 0.03069410979058089, "BERTScore": 0.7830190245921795, "MoverScore": 0.5128002236645189}, "test": {"Bleu_1": 0.036061820263287886, "Bleu_2": 0.009622104987923681, "Bleu_3": 3.996907155150396e-08, "Bleu_4": 8.276153056231133e-11, "METEOR": 0.02532440376679995, "ROUGE_L": 0.028664004061913845, "BERTScore": 0.7925587979910885, "MoverScore": 0.5160217417890958}}