{ "results": { "polish": { "acc,none": 0.5485088323770747, "acc_stderr,none": 0.006173394316567496, "acc_norm,none": 0.5613679808033012, "acc_norm_stderr,none": 0.008184643816451457, "f1,score-first": 0.5560581187634298, "f1_stderr,score-first": 0.10012844040270957, "accuracy,score-first": 0.5560581187634298, "accuracy_stderr,score-first": 0.10012844040270957, "alias": "polish" }, "belebele_pol_Latn": { "acc,none": 0.5077777777777778, "acc_stderr,none": 0.016673915944964764, "acc_norm,none": 0.5077777777777778, "acc_norm_stderr,none": 0.016673915944964764, "alias": " - belebele_pol_Latn" }, "polemo2_in": { "f1,score-first": 0.554016620498615, "f1_stderr,score-first": 0.018511989768822235, "accuracy,score-first": 0.554016620498615, "accuracy_stderr,score-first": 0.018511989768822235, "alias": " - polemo2_in" }, "polemo2_in_multiple_choice": { "acc,none": 0.27285318559556787, "acc_stderr,none": 0.016588531234686118, "acc_norm,none": 0.20221606648199447, "acc_norm_stderr,none": 0.01495832068619682, "alias": " - polemo2_in_multiple_choice" }, "polemo2_out": { "f1,score-first": 0.5566801619433198, "f1_stderr,score-first": 0.0223737095446236, "accuracy,score-first": 0.5566801619433198, "accuracy_stderr,score-first": 0.0223737095446236, "alias": " - polemo2_out" }, "polemo2_out_multiple_choice": { "acc,none": 0.3299595141700405, "acc_stderr,none": 0.02117665642255094, "acc_norm,none": 0.3340080971659919, "acc_norm_stderr,none": 0.021241711816127367, "alias": " - polemo2_out_multiple_choice" }, "polish_8tags_multiple_choice": { "acc,none": 0.6365507776761208, "acc_stderr,none": 0.007275251233570105, "acc_norm,none": 0.6779505946935042, "acc_norm_stderr,none": 0.0070675659629489225, "alias": " - polish_8tags_multiple_choice" }, "polish_8tags_regex": { "f1,score-first": 0.6564501372369624, "f1_stderr,score-first": 0.007182991593741683, "accuracy,score-first": 0.6564501372369624, "accuracy_stderr,score-first": 0.007182991593741683, "alias": " - polish_8tags_regex" }, 
"polish_belebele_regex": { "f1,score-first": 0.5433333333333333, "f1_stderr,score-first": 0.016613188209796736, "accuracy,score-first": 0.5433333333333333, "accuracy_stderr,score-first": 0.016613188209796736, "alias": " - polish_belebele_regex" }, "polish_dyk_multiple_choice": { "acc,none": 0.5481049562682215, "acc_stderr,none": 0.01552222935866704, "acc_norm,none": 0.5481049562682215, "acc_norm_stderr,none": 0.01552222935866704, "alias": " - polish_dyk_multiple_choice" }, "polish_dyk_regex": { "f1,score-first": 0.1749271137026239, "f1_stderr,score-first": 0.011848903583973948, "accuracy,score-first": 0.1749271137026239, "accuracy_stderr,score-first": 0.011848903583973948, "alias": " - polish_dyk_regex" }, "polish_ppc_multiple_choice": { "acc,none": 0.449, "acc_stderr,none": 0.015736792768752016, "acc_norm,none": 0.449, "acc_norm_stderr,none": 0.015736792768752016, "alias": " - polish_ppc_multiple_choice" }, "polish_ppc_regex": { "f1,score-first": 0.46, "f1_stderr,score-first": 0.01576859691439438, "accuracy,score-first": 0.46, "accuracy_stderr,score-first": 0.01576859691439438, "alias": " - polish_ppc_regex" }, "polish_psc_multiple_choice": { "acc,none": 0.725417439703154, "acc_stderr,none": 0.01359948992290204, "acc_norm,none": 0.725417439703154, "acc_norm_stderr,none": 0.01359948992290204, "alias": " - polish_psc_multiple_choice" }, "polish_psc_regex": { "f1,score-first": 0.5918367346938775, "f1_stderr,score-first": 0.014976490836384216, "accuracy,score-first": 0.5918367346938775, "accuracy_stderr,score-first": 0.014976490836384216, "alias": " - polish_psc_regex" } }, "groups": { "polish": { "acc,none": 0.5485088323770747, "acc_stderr,none": 0.006173394316567496, "acc_norm,none": 0.5613679808033012, "acc_norm_stderr,none": 0.008184643816451457, "f1,score-first": 0.5560581187634298, "f1_stderr,score-first": 0.10012844040270957, "accuracy,score-first": 0.5560581187634298, "accuracy_stderr,score-first": 0.10012844040270957, "alias": "polish" } }, "configs": { 
"belebele_pol_Latn": { "task": "belebele_pol_Latn", "group": "belebele", "dataset_path": "facebook/belebele", "test_split": "pol_Latn", "fewshot_split": "pol_Latn", "doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:", "doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}", "doc_to_choice": [ "A", "B", "C", "D" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{question}}", "metadata": { "version": 0.0 } }, "polemo2_in": { "task": "polemo2_in", "group": [ "polemo2" ], "dataset_path": "allegro/klej-polemo2-in", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:",
"doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence}}", "metadata": { "version": 1.0 } }, "polemo2_in_multiple_choice": { "task": "polemo2_in_multiple_choice", "group": [ "polemo2_mc" ], "dataset_path": "allegro/klej-polemo2-in", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:", "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}", "doc_to_choice": [ "Neutralny", "Negatywny", "Pozytywny", "Niejednoznaczny" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence}}" }, "polemo2_out": { "task": "polemo2_out", "group": [ "polemo2" ], "dataset_path": "allegro/klej-polemo2-out", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii. Możliwe odpowiedzi:\nA - Neutralny\nB - Negatywny\nC - Pozytywny\nD - Niejednoznaczny\nPrawidłowa odpowiedź:", "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence}}", "metadata": { "version": 1.0 } }, "polemo2_out_multiple_choice": { "task": "polemo2_out_multiple_choice", "group": [ "polemo2_mc" ], "dataset_path": "allegro/klej-polemo2-out", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:", "doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}", "doc_to_choice": [ "Neutralny", "Negatywny", "Pozytywny", "Niejednoznaczny" ], "description": "", 
"target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence}}" }, "polish_8tags_multiple_choice": { "task": "polish_8tags_multiple_choice", "dataset_path": "djstrong/8tags", "training_split": "train", "test_split": "test", "fewshot_split": "train", "doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:", "doc_to_target": "{{label|int}}", "doc_to_choice": [ "Film", "Historia", "Jedzenie", "Medycyna", "Motoryzacja", "Praca", "Sport", "Technologie" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence}}" }, "polish_8tags_regex": { "task": "polish_8tags_regex", "dataset_path": "djstrong/8tags", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Tytuł: \"{{sentence}}\"\nPytanie: jaka kategoria najlepiej pasuje do podanego tytułu?\nMożliwe odpowiedzi:\nA - film\nB - historia\nC - jedzenie\nD - medycyna\nE - motoryzacja\nF - praca\nG - sport\nH - technologie\nPrawidłowa odpowiedź:", "doc_to_target": "{{label|int}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", 
"aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCDEFGH]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3, "E": 4, "F": 5, "G": 6, "H": 7 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence}}" }, "polish_belebele_regex": { "task": "polish_belebele_regex", "dataset_path": "facebook/belebele", "test_split": "pol_Latn", "doc_to_text": "Fragment: \"{{flores_passage}}\"\nPytanie: \"{{question}}\"\nMożliwe odpowiedzi:\nA - {{mc_answer1}}\nB - {{mc_answer2}}\nC - {{mc_answer3}}\nD - {{mc_answer4}}\nPrawidłowa odpowiedź:", "doc_to_target": "{{correct_answer_num|int - 1}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{flores_passage}} {{question}} {{mc_answer1}} {{mc_answer2}} {{mc_answer3}} {{mc_answer4}}" }, "polish_dyk_multiple_choice": { "task": "polish_dyk_multiple_choice", "dataset_path": "allegro/klej-dyk", "training_split": "train", 
"test_split": "test", "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:", "doc_to_target": "{{target|int}}", "doc_to_choice": [ "Nie", "Tak" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{question}} {{answer}}" }, "polish_dyk_regex": { "task": "polish_dyk_regex", "dataset_path": "allegro/klej-dyk", "training_split": "train", "test_split": "test", "doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nCzy sugerowana odpowiedź na zadane pytanie jest poprawna? Możliwe opcje:\nA - brakuje sugerowanej odpowiedzi\nB - nie, sugerowana odpowiedź nie jest poprawna\nC - tak, sugerowana odpowiedź jest poprawna\nD - brakuje pytania\nPrawidłowa opcja:", "doc_to_target": "{{target|int+1}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{question}} {{answer}}" },
"polish_ppc_multiple_choice": { "task": "polish_ppc_multiple_choice", "dataset_path": "djstrong/ppc", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:", "doc_to_target": "{{label|int - 1}}", "doc_to_choice": [ "A", "B", "C" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}" }, "polish_ppc_regex": { "task": "polish_ppc_regex", "dataset_path": "djstrong/ppc", "training_split": "train", "validation_split": "validation", "test_split": "test", "doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - znaczą dokładnie to samo\nC - mają podobne znaczenie\nD - mają różne znaczenie\nPrawidłowa odpowiedź:", "doc_to_target": "{{label|int}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}" },
"polish_psc_multiple_choice": { "task": "polish_psc_multiple_choice", "dataset_path": "allegro/klej-psc", "training_split": "train", "test_split": "test", "doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:", "doc_to_target": "{{label|int}}", "doc_to_choice": [ "Nie", "Tak" ], "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "acc_norm", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": true, "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}" },
"polish_psc_regex": { "task": "polish_psc_regex", "dataset_path": "allegro/klej-psc", "training_split": "train", "test_split": "test", "doc_to_text": "Fragment 1: \"{{extract_text}}\"\nFragment 2: \"{{summary_text}}\"\nPytanie: jaka jest zależność między fragmentami 1 i 2?\nMożliwe odpowiedzi:\nA - wszystkie odpowiedzi poprawne\nB - dotyczą tego samego artykułu\nC - dotyczą różnych artykułów\nD - brak poprawnej odpowiedzi\nPrawidłowa odpowiedź:", "doc_to_target": "{{label|int + 1}}", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "f1", "aggregation": "mean", "higher_is_better": true, "hf_evaluate": true, "average": "micro" }, { "metric": "accuracy", "aggregation": "mean", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [ ".", "," ], "do_sample": false, "temperature": 0.0, "max_gen_toks": 50 }, "repeats": 1, "filter_list": [ { "name": "score-first", "filter": [ { "function": "regex", "regex_pattern": "(\\b[ABCD]\\b)" }, { "function": "take_first" }, { "function": "map", "mapping_dict": { "A": 0, "B": 1, "C": 2, "D": 3 }, "default_value": -1 }, { "function": "take_first" } ] } ], "should_decontaminate": true, "doc_to_decontamination_query": "{{extract_text}} {{summary_text}}" } }, "versions": { "belebele_pol_Latn": 0.0, "polemo2_in": 1.0, "polemo2_in_multiple_choice": "Yaml", "polemo2_out": 1.0, "polemo2_out_multiple_choice": "Yaml", "polish": "N/A",
"polish_8tags_multiple_choice": "Yaml", "polish_8tags_regex": "Yaml", "polish_belebele_regex": "Yaml", "polish_dyk_multiple_choice": "Yaml", "polish_dyk_regex": "Yaml", "polish_ppc_multiple_choice": "Yaml", "polish_ppc_regex": "Yaml", "polish_psc_multiple_choice": "Yaml", "polish_psc_regex": "Yaml" }, "n-shot": { "belebele_pol_Latn": 0, "polemo2_in": 0, "polemo2_in_multiple_choice": 0, "polemo2_out": 0, "polemo2_out_multiple_choice": 0, "polish": 0, "polish_8tags_multiple_choice": 0, "polish_8tags_regex": 0, "polish_belebele_regex": 0, "polish_dyk_multiple_choice": 0, "polish_dyk_regex": 0, "polish_ppc_multiple_choice": 0, "polish_ppc_regex": 0, "polish_psc_multiple_choice": 0, "polish_psc_regex": 0 }, "config": { "model": "hf", "model_args": "pretrained=Voicelab/trurl-2-13b-academic", "batch_size": "1", "batch_sizes": [], "device": "cuda:0", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null }, "git_hash": null }