t5-large-subjqa-vanilla-books-qg / eval /metric.middle.sentence.paragraph_answer.question.lmqg_qg_subjqa.books.json
{"validation": {"Bleu_1": 0.048479087452448444, "Bleu_2": 0.009814887508050046, "Bleu_3": 3.683802282886846e-08, "Bleu_4": 7.214780817092964e-11, "METEOR": 0.0510411936476269, "ROUGE_L": 0.07902812243722741, "BERTScore": 0.8276450404754052, "MoverScore": 0.5035195048524888}, "test": {"Bleu_1": 0.04120814797470353, "Bleu_2": 0.007783660337840986, "Bleu_3": 2.4956018892810725e-08, "Bleu_4": 4.518689535419543e-11, "METEOR": 0.0463163512336036, "ROUGE_L": 0.06362608202312194, "BERTScore": 0.8212118663712946, "MoverScore": 0.5029136160777198}}