{
"results": {
"polish_mc": {
"acc,none": 0.3392391870766024,
"acc_stderr,none": 0.08549718784572437,
"acc_norm,none": 0.2795205836373111,
"acc_norm_stderr,none": 0.09501543204876321,
"alias": "polish_mc"
},
"belebele_pol_Latn": {
"acc,none": 0.22555555555555556,
"acc_stderr,none": 0.013939334910458132,
"acc_norm,none": 0.22555555555555556,
"acc_norm_stderr,none": 0.013939334910458132,
"alias": " - belebele_pol_Latn"
},
"polemo2_in_multiple_choice": {
"acc,none": 0.24238227146814403,
"acc_stderr,none": 0.015959080213367862,
"acc_norm,none": 0.23130193905817176,
"acc_norm_stderr,none": 0.015703624326405728,
"alias": " - polemo2_in_multiple_choice"
},
"polemo2_out_multiple_choice": {
"acc,none": 0.10121457489878542,
"acc_stderr,none": 0.013583950135048612,
"acc_norm,none": 0.09109311740890688,
"acc_norm_stderr,none": 0.01295922466015463,
"alias": " - polemo2_out_multiple_choice"
},
"polish_8tags_multiple_choice": {
"acc,none": 0.3920402561756633,
"acc_stderr,none": 0.007384351149071361,
"acc_norm,none": 0.26395242451967066,
"acc_norm_stderr,none": 0.0066669219027414165,
"alias": " - polish_8tags_multiple_choice"
},
"polish_dyk_multiple_choice": {
"acc,none": 0.31875607385811466,
"acc_stderr,none": 0.014533961276467866,
"acc_norm,none": 0.31875607385811466,
"acc_norm_stderr,none": 0.014533961276467866,
"alias": " - polish_dyk_multiple_choice"
},
"polish_ppc_multiple_choice": {
"acc,none": 0.185,
"acc_stderr,none": 0.0122851913263867,
"acc_norm,none": 0.185,
"acc_norm_stderr,none": 0.0122851913263867,
"alias": " - polish_ppc_multiple_choice"
},
"polish_psc_multiple_choice": {
"acc,none": 0.5565862708719852,
"acc_stderr,none": 0.015137806899417445,
"acc_norm,none": 0.5565862708719852,
"acc_norm_stderr,none": 0.015137806899417445,
"alias": " - polish_psc_multiple_choice"
}
},
"groups": {
"polish_mc": {
"acc,none": 0.3392391870766024,
"acc_stderr,none": 0.08549718784572437,
"acc_norm,none": 0.2795205836373111,
"acc_norm_stderr,none": 0.09501543204876321,
"alias": "polish_mc"
}
},
"configs": {
"belebele_pol_Latn": {
"task": "belebele_pol_Latn",
"group": "belebele",
"dataset_path": "facebook/belebele",
"test_split": "pol_Latn",
"fewshot_split": "pol_Latn",
"doc_to_text": "P: {{flores_passage}}\nQ: {{question.strip()}}\nA: {{mc_answer1}}\nB: {{mc_answer2}}\nC: {{mc_answer3}}\nD: {{mc_answer4}}\nAnswer:",
"doc_to_target": "{{['1', '2', '3', '4'].index(correct_answer_num)}}",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{question}}",
"metadata": {
"version": 0.0
}
},
"polemo2_in_multiple_choice": {
"task": "polemo2_in_multiple_choice",
"group": [
"polemo2_mc"
],
"dataset_path": "allegro/klej-polemo2-in",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
"doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
"doc_to_choice": [
"Neutralny",
"Negatywny",
"Pozytywny",
"Niejednoznaczny"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence}}"
},
"polemo2_out_multiple_choice": {
"task": "polemo2_out_multiple_choice",
"group": [
"polemo2_mc"
],
"dataset_path": "allegro/klej-polemo2-out",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Opinia: \"{{sentence}}\"\nOkreśl sentyment podanej opinii: Neutralny, Negatywny, Pozytywny, Niejednoznaczny.\nSentyment:",
"doc_to_target": "{{['__label__meta_zero', '__label__meta_minus_m', '__label__meta_plus_m', '__label__meta_amb'].index(target)}}",
"doc_to_choice": [
"Neutralny",
"Negatywny",
"Pozytywny",
"Niejednoznaczny"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence}}"
},
"polish_8tags_multiple_choice": {
"task": "polish_8tags_multiple_choice",
"dataset_path": "sdadas/8tags",
"training_split": "train",
"test_split": "test",
"fewshot_split": "train",
"doc_to_text": "Tytuł: \"{{sentence}}\"\nDo podanego tytułu przyporządkuj jedną najlepiej pasującą kategorię z podanych: Film, Historia, Jedzenie, Medycyna, Motoryzacja, Praca, Sport, Technologie.\nKategoria:",
"doc_to_target": "{{label|int}}",
"doc_to_choice": [
"Film",
"Historia",
"Jedzenie",
"Medycyna",
"Motoryzacja",
"Praca",
"Sport",
"Technologie"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence}}"
},
"polish_dyk_multiple_choice": {
"task": "polish_dyk_multiple_choice",
"dataset_path": "allegro/klej-dyk",
"training_split": "train",
"test_split": "test",
"doc_to_text": "Pytanie: \"{{question}}\"\nSugerowana odpowiedź: \"{{answer}}\"\nPytanie: Czy sugerowana odpowiedź na zadane pytanie jest poprawna?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
"doc_to_target": "{{target|int}}",
"doc_to_choice": [
"Nie",
"Tak"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{question}} {{answer}}"
},
"polish_ppc_multiple_choice": {
"task": "polish_ppc_multiple_choice",
"dataset_path": "sdadas/ppc",
"training_split": "train",
"validation_split": "validation",
"test_split": "test",
"doc_to_text": "Zdanie A: \"{{sentence_A}}\"\nZdanie B: \"{{sentence_B}}\"\nPytanie: jaka jest zależność między zdaniami A i B? Możliwe odpowiedzi:\nA - znaczą dokładnie to samo\nB - mają podobne znaczenie\nC - mają różne znaczenie\nPrawidłowa odpowiedź:",
"doc_to_target": "{{label|int - 1}}",
"doc_to_choice": [
"A",
"B",
"C"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{sentence_A}} {{sentence_B}}"
},
"polish_psc_multiple_choice": {
"task": "polish_psc_multiple_choice",
"dataset_path": "allegro/klej-psc",
"training_split": "train",
"test_split": "test",
"doc_to_text": "Tekst: \"{{extract_text}}\"\nPodsumowanie: \"{{summary_text}}\"\nPytanie: Czy podsumowanie dla podanego tekstu jest poprawne?\nOdpowiedz krótko \"Tak\" lub \"Nie\". Prawidłowa odpowiedź:",
"doc_to_target": "{{label|int}}",
"doc_to_choice": [
"Nie",
"Tak"
],
"description": "",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
},
{
"metric": "acc_norm",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": true,
"doc_to_decontamination_query": "{{extract_text}} {{summary_text}}"
}
},
"versions": {
"belebele_pol_Latn": 0.0,
"polemo2_in_multiple_choice": "Yaml",
"polemo2_out_multiple_choice": "Yaml",
"polish_8tags_multiple_choice": "Yaml",
"polish_dyk_multiple_choice": "Yaml",
"polish_mc": "N/A",
"polish_ppc_multiple_choice": "Yaml",
"polish_psc_multiple_choice": "Yaml"
},
"n-shot": {
"belebele_pol_Latn": 0,
"polemo2_in_multiple_choice": 0,
"polemo2_out_multiple_choice": 0,
"polish_8tags_multiple_choice": 0,
"polish_dyk_multiple_choice": 0,
"polish_mc": 0,
"polish_ppc_multiple_choice": 0,
"polish_psc_multiple_choice": 0
},
"config": {
"model": "hf",
"model_args": "pretrained=RWKV/HF_v5-Eagle-7B,trust_remote_code=True",
"batch_size": "4",
"batch_sizes": [],
"device": "cuda:0",
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null
},
"git_hash": null
}