update model
- config.json +1 -1
- eval/metric.first.answer.json +1 -0
- eval/metric.first.sentence.json +1 -0
- eval/metric.last.sentence.json +1 -0
- eval/metric.long.sentence.json +1 -0
- eval/metric.middle.sentence.json +1 -0
- eval/metric.short.sentence.json +1 -0
- eval/samples.dev.hyp.txt +0 -0
- eval/samples.test.hyp.txt +0 -0
- eval_final/metric.first.sentence.json +1 -0
- eval_final/samples.dev.hyp.txt +0 -0
- eval_final/samples.test.hyp.txt +0 -0
- pytorch_model.bin +1 -1
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "t5qg_output/search/
+  "_name_or_path": "t5qg_output/search/bart_large_squad_answer_level/model_ejulkw/epoch_2",
   "activation_dropout": 0.1,
   "activation_function": "gelu",
   "add_bias_logits": false,
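The updated "_name_or_path" records the local training output directory the weights came from, not a Hub repo id, so it is only meaningful on the machine that produced the checkpoint. A minimal sketch of inspecting the committed config with transformers, assuming this repository has been cloned locally (repo_dir is a placeholder path, not something from the commit):

# Sketch: inspect the committed config.json with transformers.
# `repo_dir` is a hypothetical path to a local clone of this repository.
from transformers import AutoConfig

repo_dir = "path/to/local/clone"
config = AutoConfig.from_pretrained(repo_dir)

# _name_or_path now points at the training output directory the weights came from.
print(config._name_or_path)        # t5qg_output/search/bart_large_squad_answer_level/model_ejulkw/epoch_2
print(config.activation_dropout)   # 0.1
print(config.activation_function)  # gelu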
eval/metric.first.answer.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.561620793576093, "Bleu_2": 0.4068755693074037, "Bleu_3": 0.3171985222424095, "Bleu_4": 0.25435444221839115}, "test": {"Bleu_1": 0.5513784396078307, "Bleu_2": 0.3943382019658421, "Bleu_3": 0.3028510062580668, "Bleu_4": 0.23880023973007972}}
eval/metric.first.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.5961185956186713, "Bleu_2": 0.43973075171342463, "Bleu_3": 0.34653800312243627, "Bleu_4": 0.2802821799356514, "METEOR": 0.2834228242079163, "ROUGE_L": 0.5531196137031562}, "test": {"Bleu_1": 0.5949949073056991, "Bleu_2": 0.4347986400029582, "Bleu_3": 0.33711054849596694, "Bleu_4": 0.2670345121167377, "METEOR": 0.2771683644543029, "ROUGE_L": 0.5424152494855272}}
eval/metric.last.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.5888331338037063, "Bleu_2": 0.4292470817671194, "Bleu_3": 0.3353448642287594, "Bleu_4": 0.2690537335548372, "METEOR": 0.2760311432975882, "ROUGE_L": 0.5421139568322882}, "test": {"Bleu_1": 0.5900075173001194, "Bleu_2": 0.4273027915082267, "Bleu_3": 0.3290745277026461, "Bleu_4": 0.25933002101722863, "METEOR": 0.2723402780812761, "ROUGE_L": 0.5346435374468232}}
eval/metric.long.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.5784067592033726, "Bleu_2": 0.4213023837432283, "Bleu_3": 0.32859098586344965, "Bleu_4": 0.2634088031146139, "METEOR": 0.28031613555565826, "ROUGE_L": 0.5392837604986869}, "test": {"Bleu_1": 0.592047802258971, "Bleu_2": 0.4278680118343731, "Bleu_3": 0.32871710609741495, "Bleu_4": 0.25861018312369183, "METEOR": 0.2762543449109007, "ROUGE_L": 0.531091069968505}}
eval/metric.middle.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.5849553908405754, "Bleu_2": 0.42778393521051156, "Bleu_3": 0.33519953331285546, "Bleu_4": 0.2697245611860127, "METEOR": 0.28096988712123067, "ROUGE_L": 0.5442280051653765}, "test": {"Bleu_1": 0.598019578596652, "Bleu_2": 0.43458889336541523, "Bleu_3": 0.3358776917353022, "Bleu_4": 0.26573690078761625, "METEOR": 0.2773980383500014, "ROUGE_L": 0.5375992332728966}}
eval/metric.short.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.588492767934199, "Bleu_2": 0.4335680265031344, "Bleu_3": 0.3414749669613021, "Bleu_4": 0.2760231523884042, "METEOR": 0.27800204947689366, "ROUGE_L": 0.5535842510511678}, "test": {"Bleu_1": 0.5824657601920032, "Bleu_2": 0.4245661591251516, "Bleu_3": 0.3285198026185343, "Bleu_4": 0.2597657598851307, "METEOR": 0.2713215637948342, "ROUGE_L": 0.5418273056877164}}
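Each metric file above (and the eval_final one further down) is a single-line JSON object keyed by split, holding Bleu_1 through Bleu_4 and, for the sentence-level files, METEOR and ROUGE_L. A small sketch for collecting the committed scores, assuming the eval/ directory from this repository is available locally:

# Sketch: read the committed evaluation metrics and print test-set scores.
import json
from pathlib import Path

for path in sorted(Path("eval").glob("metric.*.json")):
    with open(path) as f:
        scores = json.load(f)
    test = scores["test"]
    line = f'{path.name}: BLEU-4={test["Bleu_4"]:.4f}'
    if "ROUGE_L" in test:
        line += f', METEOR={test["METEOR"]:.4f}, ROUGE-L={test["ROUGE_L"]:.4f}'
    print(line)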
eval/samples.dev.hyp.txt
CHANGED
The diff for this file is too large to render.
eval/samples.test.hyp.txt
CHANGED
The diff for this file is too large to render.
eval_final/metric.first.sentence.json
ADDED
@@ -0,0 +1 @@
+{"dev": {"Bleu_1": 0.5917073848212745, "Bleu_2": 0.4366246800285374, "Bleu_3": 0.3443732992155462, "Bleu_4": 0.27860373837160013, "METEOR": 0.2852321563686249, "ROUGE_L": 0.5509618525862262}, "test": {"Bleu_1": 0.5985019789210954, "Bleu_2": 0.4375613439278559, "Bleu_3": 0.33929563551647063, "Bleu_4": 0.26879496341517695, "METEOR": 0.2784937082876495, "ROUGE_L": 0.5417929535734445}}
eval_final/samples.dev.hyp.txt
ADDED
The diff for this file is too large to render.
eval_final/samples.test.hyp.txt
ADDED
The diff for this file is too large to render.
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e1209670269636901a39e3dca8cafc7ce459e1d21c3fbec8cc00d5bef398f266
 size 1625561409
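pytorch_model.bin is tracked through Git LFS, so the file in the commit is only a pointer: the spec version, the SHA-256 of the actual weights, and their size in bytes. A sketch for checking a pulled copy against the pointer values committed above:

# Sketch: verify a pulled Git LFS object against the committed pointer values.
import hashlib
import os

EXPECTED_SHA256 = "e1209670269636901a39e3dca8cafc7ce459e1d21c3fbec8cc00d5bef398f266"
EXPECTED_SIZE = 1625561409  # bytes, from the "size" line of the pointer

path = "pytorch_model.bin"
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")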
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "t5qg_output/search/
+{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "t5qg_output/search/bart_large_squad_answer_level/model_ejulkw/epoch_2", "tokenizer_class": "BartTokenizer"}
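The updated tokenizer config also records an explicit "tokenizer_class": "BartTokenizer" alongside the new name_or_path. A sketch of loading it, assuming the full repository (including the tokenizer's vocab and merges files) is cloned locally at a placeholder path:

# Sketch: load the tokenizer described by the updated tokenizer_config.json.
# `repo_dir` is a hypothetical path to a local clone of this repository.
from transformers import BartTokenizer

repo_dir = "path/to/local/clone"
tokenizer = BartTokenizer.from_pretrained(repo_dir)

print(tokenizer.model_max_length)                  # 1024
print(tokenizer.pad_token, tokenizer.mask_token)   # <pad> <mask>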