asahi417 committed
Commit fa4debc
1 Parent(s): 7b92c1a
eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_koquad.default.lmqg_mt5-small-koquad-ae.json ADDED
@@ -0,0 +1 @@
+ {"test": {"QAAlignedF1Score (BERTScore)": 0.8051856640160873, "QAAlignedRecall (BERTScore)": 0.8380077880740827, "QAAlignedPrecision (BERTScore)": 0.7756050085026733, "QAAlignedF1Score (MoverScore)": 0.8295356763858925, "QAAlignedRecall (MoverScore)": 0.8702459907786589, "QAAlignedPrecision (MoverScore)": 0.7938575539431494, "Bleu_1": 0.08159742914726335, "Bleu_2": 0.043555415060016305, "Bleu_3": 0.020711845210355337, "Bleu_4": 0.010291521564549174, "METEOR": 0.23601603442915114, "ROUGE_L": 0.12997572881009126, "BERTScore": 0.6786500538377706, "MoverScore": 0.6245260884958}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8270195012347064, "QAAlignedRecall (BERTScore)": 0.8338042555286257, "QAAlignedPrecision (BERTScore)": 0.8207682581541046, "QAAlignedF1Score (MoverScore)": 0.8711402794323722, "QAAlignedRecall (MoverScore)": 0.8737825552896148, "QAAlignedPrecision (MoverScore)": 0.8696819520075306, "Bleu_1": 0.2741035101356282, "Bleu_2": 0.17859250850701036, "Bleu_3": 0.10870040239577222, "Bleu_4": 0.06321766723069362, "METEOR": 0.28255983915021887, "ROUGE_L": 0.25379837939396416, "BERTScore": 0.7716758662213882, "MoverScore": 0.7009494635643794}}
eval_pipeline/samples.test.hyp.paragraph.questions_answers.lmqg_qg_koquad.default.lmqg_mt5-small-koquad-ae.txt ADDED
The diff for this file is too large to render.
 
eval_pipeline/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_koquad.default.lmqg_mt5-small-koquad-ae.txt ADDED
The diff for this file is too large to render.
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b784843bb2950f45f5435526cb5aeae26120c703a02fdfe0b07f7c81ecd2a66c
- size 1200724741
+ oid sha256:25f81bb2b8f84d5f4386be97b21d1accfa03ed4318524f5a2688f97c29af2572
+ size 1200723013