mt5-small-itquad-qg-ae / eval / metric.first.answer.paragraph_sentence.answer.lmqg_qg_itquad.default.json
{"validation": {"Bleu_1": 0.39243158308234505, "Bleu_2": 0.3340814182775095, "Bleu_3": 0.29623430452827515, "Bleu_4": 0.2633059355533968, "METEOR": 0.44794614286543694, "ROUGE_L": 0.44138777620036307, "BERTScore": 0.9188229443366118, "MoverScore": 0.8425227110910056, "AnswerF1Score": 76.75979875027954, "AnswerExactMatch": 64.98882901826784}, "test": {"Bleu_1": 0.39332785795297454, "Bleu_2": 0.33635771092468514, "Bleu_3": 0.29592318553497876, "Bleu_4": 0.2601463452363598, "METEOR": 0.4267896663272497, "ROUGE_L": 0.45147997467067935, "BERTScore": 0.9023595586064642, "MoverScore": 0.8117498443929185, "AnswerF1Score": 72.09095481160314, "AnswerExactMatch": 57.8525430411355}}