mt5-small-esquad-qag / eval /metric.first.answer.paragraph.questions_answers.lmqg_qag_esquad.default.json
{"validation": {"Bleu_1": 0.2718803847471669, "Bleu_2": 0.15254535887603982, "Bleu_3": 0.0894467983934925, "Bleu_4": 0.060018622693704934, "METEOR": 0.20863168631638873, "ROUGE_L": 0.2493543685540034, "BERTScore": 0.7305193155297462, "MoverScore": 0.521490296598535, "QAAlignedF1Score (BERTScore)": 0.8023877345182617, "QAAlignedRecall (BERTScore)": 0.7787815647509312, "QAAlignedPrecision (BERTScore)": 0.8284482213316031, "QAAlignedF1Score (MoverScore)": 0.5564934635971232, "QAAlignedRecall (MoverScore)": 0.5375780961020423, "QAAlignedPrecision (MoverScore)": 0.5781583103105322}, "test": {"Bleu_1": 0.07432354130344639, "Bleu_2": 0.03507385086417368, "Bleu_3": 0.0204298541841383, "Bleu_4": 0.013646333757009334, "METEOR": 0.1689782326396343, "ROUGE_L": 0.12757471978428708, "BERTScore": 0.6408908916549078, "MoverScore": 0.5014847577134893, "QAAlignedF1Score (BERTScore)": 0.7811615456479336, "QAAlignedRecall (BERTScore)": 0.7826666697304786, "QAAlignedPrecision (BERTScore)": 0.7800460044662871, "QAAlignedF1Score (MoverScore)": 0.5391780380897195, "QAAlignedRecall (MoverScore)": 0.5393083014332328, "QAAlignedPrecision (MoverScore)": 0.5392940215876131}}