mt5-base-itquad-qg / eval_pipeline / metric.first.answer.paragraph.questions_answers.lmqg_qg_itquad.default.lmqg_mt5-base-itquad-ae.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.8167904791906043, "QAAlignedRecall (BERTScore)": 0.8216192701521727, "QAAlignedPrecision (BERTScore)": 0.8124736531553751, "QAAlignedF1Score (MoverScore)": 0.558255110674536, "QAAlignedRecall (MoverScore)": 0.5600604577516232, "QAAlignedPrecision (MoverScore)": 0.5568496662371624, "Bleu_1": 0.2357148779623688, "Bleu_2": 0.13036138557646462, "Bleu_3": 0.06301483269048781, "Bleu_4": 0.033292442071502566, "METEOR": 0.23375792177327157, "ROUGE_L": 0.2325288445744741, "BERTScore": 0.7724787136558555, "MoverScore": 0.5452210041957045}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8085899545785263, "QAAlignedRecall (BERTScore)": 0.8324580582063605, "QAAlignedPrecision (BERTScore)": 0.7867042396548015, "QAAlignedF1Score (MoverScore)": 0.5518337697009046, "QAAlignedRecall (MoverScore)": 0.5683519897016391, "QAAlignedPrecision (MoverScore)": 0.5371723355679867, "Bleu_1": 0.1044525785868994, "Bleu_2": 0.04373823380331314, "Bleu_3": 0.018956200504695136, "Bleu_4": 0.009325528959266683, "METEOR": 0.19986641358489682, "ROUGE_L": 0.1477864152103143, "BERTScore": 0.6972227563286663, "MoverScore": 0.517332621538305}}