{
  "results": {
    "polish_mc": {
      "acc,none": 0.718186555497655,
      "acc_stderr,none": 0.04357151021998618,
      "acc_norm,none": 0.7141219385096405,
      "acc_norm_stderr,none": 0.04372608290602946,
      "alias": "polish_mc"
    },
    "belebele_pol_Latn": {
      "acc,none": 0.6855555555555556,
      "acc_stderr,none": 0.015485079682471438,
      "acc_norm,none": 0.6855555555555556,
      "acc_norm_stderr,none": 0.015485079682471438,
      "alias": " - belebele_pol_Latn"
    },
    "polemo2_in_multiple_choice": {
      "acc,none": 0.7132963988919667,
      "acc_stderr,none": 0.01684162361588182,
      "acc_norm,none": 0.7174515235457064,
      "acc_norm_stderr,none": 0.016767763260316662,
      "alias": " - polemo2_in_multiple_choice"
    },
    "polemo2_out_multiple_choice": {
      "acc,none": 0.6902834008097166,
      "acc_stderr,none": 0.02082439995830139,
      "acc_norm,none": 0.7145748987854251,
      "acc_norm_stderr,none": 0.020339791674885366,
      "alias": " - polemo2_out_multiple_choice"
    },
    "polish_8tags_multiple_choice": {
      "acc,none": 0.72483989021043,
      "acc_stderr,none": 0.0067549692558673035,
      "acc_norm,none": 0.7124885635864593,
      "acc_norm_stderr,none": 0.006845829961519696,
      "alias": " - polish_8tags_multiple_choice"
    },
    "polish_dyk_multiple_choice": {
      "acc,none": 0.8299319727891157,
      "acc_stderr,none": 0.011717528745193611,
      "acc_norm,none": 0.8299319727891157,
      "acc_norm_stderr,none": 0.011717528745193611,
      "alias": " - polish_dyk_multiple_choice"
    },
    "polish_ppc_multiple_choice": {
      "acc,none": 0.624,
      "acc_stderr,none": 0.015325105508898132,
      "acc_norm,none": 0.624,
      "acc_norm_stderr,none": 0.015325105508898132,
      "alias": " - polish_ppc_multiple_choice"
    },
    "polish_psc_multiple_choice": {
      "acc,none": 0.7152133580705009,
      "acc_stderr,none": 0.013752123513176048,
      "acc_norm,none": 0.7152133580705009,
      "acc_norm_stderr,none": 0.013752123513176048,
      "alias": " - polish_psc_multiple_choice"
    }
  },
  "groups": {
    "polish_mc": {
      "acc,none": 0.718186555497655,
      "acc_stderr,none": 0.04357151021998618,
      "acc_norm,none": 0.7141219385096405,
      "acc_norm_stderr,none": 0.04372608290602946,
      "alias": "polish_mc"
    }
  },
  "configs": {
    "belebele_pol_Latn": {
      "task": "belebele_pol_Latn",
      "group": "belebele",
      "dataset_path": "facebook/belebele",
      "test_split": "pol_Latn",
      "fewshot_split": "pol_Latn",
      "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
      "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
      "doc_to_choice": [
        "A",
        "B",
        "C",
        "D"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "fewshot_config": {
        "sampler": "first_n"
      },
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{question}}",
      "metadata": {
        "version": 0.0
      }
    },
    "polemo2_in_multiple_choice": {
      "task": "polemo2_in_multiple_choice",
      "group": [
        "polemo2_mc"
      ],
      "dataset_path": "allegro/klej-polemo2-in",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
      "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
      "doc_to_choice": [
        "Neutralny",
        "Negatywny",
        "Pozytywny",
        "Niejednoznaczny"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{sentence}}"
    },
    "polemo2_out_multiple_choice": {
      "task": "polemo2_out_multiple_choice",
      "group": [
        "polemo2_mc"
      ],
      "dataset_path": "allegro/klej-polemo2-out",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
      "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
      "doc_to_choice": [
        "Neutralny",
        "Negatywny",
        "Pozytywny",
        "Niejednoznaczny"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{sentence}}"
    },
    "polish_8tags_multiple_choice": {
      "task": "polish_8tags_multiple_choice",
      "dataset_path": "djstrong/8tags",
      "training_split": "train",
      "test_split": "test",
      "fewshot_split": "train",
      "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
      "doc_to_target": "{{label|int}}",
      "doc_to_choice": [
        "Film",
        "Historia",
        "Jedzenie",
        "Medycyna",
        "Motoryzacja",
        "Praca",
        "Sport",
        "Technologie"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{sentence}}"
    },
    "polish_dyk_multiple_choice": {
      "task": "polish_dyk_multiple_choice",
      "dataset_path": "allegro/klej-dyk",
      "training_split": "train",
      "test_split": "test",
      "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
      "doc_to_target": "{{target|int}}",
      "doc_to_choice": [
        "Nie",
        "Tak"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{question}} {{answer}}"
    },
    "polish_ppc_multiple_choice": {
      "task": "polish_ppc_multiple_choice",
      "dataset_path": "djstrong/ppc",
      "training_split": "train",
      "validation_split": "validation",
      "test_split": "test",
      "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
      "doc_to_target": "{{label|int - 1}}",
      "doc_to_choice": [
        "A",
        "B",
        "C"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
    },
    "polish_psc_multiple_choice": {
      "task": "polish_psc_multiple_choice",
      "dataset_path": "allegro/klej-psc",
      "training_split": "train",
      "test_split": "test",
      "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
      "doc_to_target": "{{label|int}}",
      "doc_to_choice": [
        "Nie",
        "Tak"
      ],
      "description": "",
      "target_delimiter": " ",
      "fewshot_delimiter": "\n\n",
      "num_fewshot": 5,
      "metric_list": [
        {
          "metric": "acc",
          "aggregation": "mean",
          "higher_is_better": true
        },
        {
          "metric": "acc_norm",
          "aggregation": "mean",
          "higher_is_better": true
        }
      ],
      "output_type": "multiple_choice",
      "repeats": 1,
      "should_decontaminate": true,
      "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
    }
  },
  "versions": {
    "belebele_pol_Latn": 0.0,
    "polemo2_in_multiple_choice": "Yaml",
    "polemo2_out_multiple_choice": "Yaml",
    "polish_8tags_multiple_choice": "Yaml",
    "polish_dyk_multiple_choice": "Yaml",
    "polish_mc": "N/A",
    "polish_ppc_multiple_choice": "Yaml",
    "polish_psc_multiple_choice": "Yaml"
  },
  "n-shot": {
    "belebele_pol_Latn": 5,
    "polemo2_in_multiple_choice": 5,
    "polemo2_out_multiple_choice": 5,
    "polish_8tags_multiple_choice": 5,
    "polish_dyk_multiple_choice": 5,
    "polish_mc": 5,
    "polish_ppc_multiple_choice": 5,
    "polish_psc_multiple_choice": 5
  },
  "config": {
    "model": "hf",
    "model_args": "pretrained=mistralai/Mistral-7B-v0.1",
    "batch_size": "1",
    "batch_sizes": [],
    "device": "cuda:0",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  },
  "git_hash": null
}