mt5-small-dequad-qg / eval /metric.first.sentence.sentence_answer.question.lmqg_qg_dequad.default.json
{"validation": {"Bleu_1": 0.10524124290844193, "Bleu_2": 0.04418274657643979, "Bleu_3": 0.01860247112165767, "Bleu_4": 8.440750089752408e-07, "METEOR": 0.10545211162747903, "ROUGE_L": 0.10278890766462019, "BERTScore": 0.7912345443179315, "MoverScore": 0.5422661071390639}, "test": {"Bleu_1": 0.0981556736480099, "Bleu_2": 0.04166224522547785, "Bleu_3": 0.01747502641936229, "Bleu_4": 0.006384639775190415, "METEOR": 0.10759938879591739, "ROUGE_L": 0.09945400214840362, "BERTScore": 0.7852240769590457, "MoverScore": 0.5405228123904787}}