zhuohan-7 committed
Commit
4b7a759
1 Parent(s): 45b633e

Upload folder using huggingface_hub
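This commit message is huggingface_hub's default for folder uploads. For context, a minimal sketch of how a commit like this is typically produced (the repo ID below is a hypothetical placeholder, not taken from this page):

    # Sketch only: repo_id is a placeholder; repo_type is an assumption.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="results",        # local folder of CSV result files
        path_in_repo="results",
        repo_id="user/results-repo",  # hypothetical
        repo_type="dataset",          # assumption; could also be a Space
    )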

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. results/cross_lingual/few_shot/cross_logiqa.csv +2 -0
  2. results/cross_lingual/few_shot/cross_mmlu.csv +2 -0
  3. results/cross_lingual/few_shot/cross_xquad.csv +1 -0
  4. results/cross_lingual/zero_shot/cross_logiqa.csv +1 -0
  5. results/cross_lingual/zero_shot/cross_mmlu.csv +1 -0
  6. results/cross_lingual/zero_shot/cross_xquad.csv +1 -0
  7. results/cultural_reasoning/few_shot/cn_eval.csv +2 -0
  8. results/cultural_reasoning/few_shot/ph_eval.csv +2 -0
  9. results/cultural_reasoning/few_shot/sg_eval.csv +1 -0
  10. results/cultural_reasoning/few_shot/us_eval.csv +2 -0
  11. results/cultural_reasoning/zero_shot/cn_eval.csv +1 -0
  12. results/cultural_reasoning/zero_shot/ph_eval.csv +1 -0
  13. results/cultural_reasoning/zero_shot/sg_eval.csv +1 -0
  14. results/cultural_reasoning/zero_shot/us_eval.csv +1 -0
  15. results/dialogue/few_shot/dream.csv +2 -0
  16. results/dialogue/zero_shot/dialogsum.csv +2 -0
  17. results/dialogue/zero_shot/dream.csv +2 -0
  18. results/dialogue/zero_shot/samsum.csv +2 -0
  19. results/emotion/few_shot/ind_emotion.csv +2 -0
  20. results/emotion/few_shot/sst2.csv +2 -0
  21. results/emotion/zero_shot/ind_emotion.csv +2 -0
  22. results/emotion/zero_shot/sst2.csv +2 -0
  23. results/flores_translation/few_shot/ind2eng.csv +2 -0
  24. results/flores_translation/few_shot/vie2eng.csv +2 -0
  25. results/flores_translation/few_shot/zho2eng.csv +2 -0
  26. results/flores_translation/few_shot/zsm2eng.csv +2 -0
  27. results/flores_translation/zero_shot/ind2eng.csv +2 -0
  28. results/flores_translation/zero_shot/vie2eng.csv +2 -0
  29. results/flores_translation/zero_shot/zho2eng.csv +2 -0
  30. results/flores_translation/zero_shot/zsm2eng.csv +2 -0
  31. results/fundamental_nlp_tasks/few_shot/c3.csv +2 -0
  32. results/fundamental_nlp_tasks/few_shot/cola.csv +2 -0
  33. results/fundamental_nlp_tasks/few_shot/mnli.csv +1 -0
  34. results/fundamental_nlp_tasks/few_shot/mrpc.csv +1 -0
  35. results/fundamental_nlp_tasks/few_shot/ocnli.csv +2 -0
  36. results/fundamental_nlp_tasks/few_shot/qnli.csv +1 -0
  37. results/fundamental_nlp_tasks/few_shot/qqp.csv +1 -0
  38. results/fundamental_nlp_tasks/few_shot/rte.csv +1 -0
  39. results/fundamental_nlp_tasks/few_shot/wnli.csv +1 -0
  40. results/fundamental_nlp_tasks/zero_shot/c3.csv +2 -0
  41. results/fundamental_nlp_tasks/zero_shot/cola.csv +2 -0
  42. results/fundamental_nlp_tasks/zero_shot/mnli.csv +2 -0
  43. results/fundamental_nlp_tasks/zero_shot/mrpc.csv +2 -0
  44. results/fundamental_nlp_tasks/zero_shot/ocnli.csv +2 -0
  45. results/fundamental_nlp_tasks/zero_shot/qnli.csv +2 -0
  46. results/fundamental_nlp_tasks/zero_shot/qqp.csv +2 -0
  47. results/fundamental_nlp_tasks/zero_shot/rte.csv +2 -0
  48. results/fundamental_nlp_tasks/zero_shot/wnli.csv +2 -0
  49. results/general_reasoning/few_shot/c_eval.csv +3 -0
  50. results/general_reasoning/few_shot/cmmlu.csv +2 -0
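All files below share the layout results/<category>/<setting>/<task>.csv, each with a Model column plus task-specific metric columns. A hypothetical sketch for collecting them into one table (pandas usage is an assumption about the reader's tooling, not something this commit prescribes):

    # Hypothetical aggregation sketch over the results/ tree listed above.
    from pathlib import Path
    import pandas as pd

    frames = []
    for path in Path("results").glob("*/*/*.csv"):
        df = pd.read_csv(path)
        # e.g. results/cross_lingual/few_shot/cross_logiqa.csv
        df["category"], df["setting"], df["task"] = path.parts[1], path.parts[2], path.stem
        frames.append(df)

    all_results = pd.concat(frames, ignore_index=True)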
results/cross_lingual/few_shot/cross_logiqa.csv CHANGED
@@ -1,2 +1,4 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+ Meta-Llama-3-70B,0.6152597402597404,0.49480519480519464,0.5484971301967684,0.7272727272727273,0.6534090909090909,0.625,0.5681818181818182,0.6136363636363636,0.5795454545454546,0.5397727272727273
  Meta-Llama-3-8B,0.44967532467532456,0.2623376623376623,0.33136129711503204,0.5227272727272727,0.4431818181818182,0.44886363636363635,0.44886363636363635,0.3693181818181818,0.4602272727272727,0.45454545454545453
+ Meta-Llama-3.1-8B,0.46266233766233766,0.277435064935065,0.34686989908229837,0.5284090909090909,0.5,0.4375,0.4772727272727273,0.4318181818181818,0.4431818181818182,0.42045454545454547
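In these cross-lingual files, the AC3 column is consistent with the harmonic mean of Accuracy and Cross-Lingual Consistency; the added Meta-Llama-3-70B row above checks out numerically. A quick verification sketch (the formula is inferred from the data, not stated on this page):

    # AC3 appears to be the harmonic mean of Accuracy and Cross-Lingual
    # Consistency; values below are copied from the Meta-Llama-3-70B row.
    def ac3(accuracy: float, consistency: float) -> float:
        return 2 * accuracy * consistency / (accuracy + consistency)

    print(ac3(0.6152597402597404, 0.49480519480519464))  # ~0.5484971301967684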
results/cross_lingual/few_shot/cross_mmlu.csv CHANGED
@@ -1,2 +1,4 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
+ Meta-Llama-3-70B,0.7552380952380952,0.6674285714285715,0.708623453080271,0.8066666666666666,0.7266666666666667,0.7866666666666666,0.7533333333333333,0.7733333333333333,0.72,0.72
  Meta-Llama-3-8B,0.5295238095238096,0.31923809523809527,0.3983311959862401,0.6266666666666667,0.5466666666666666,0.56,0.4866666666666667,0.5266666666666666,0.5,0.46
+ Meta-Llama-3.1-8B,0.5342857142857141,0.2960000000000001,0.3809497590731823,0.6733333333333333,0.5533333333333333,0.5133333333333333,0.47333333333333333,0.5133333333333333,0.5,0.5133333333333333
results/cross_lingual/few_shot/cross_xquad.csv CHANGED
@@ -1,3 +1,4 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
  Meta-Llama-3-70B,0.9596638655462185,0.9359243697478992,0.9476454662047799,0.9697478991596639,0.9504201680672268,0.957983193277311,0.9605042016806723,,,
  Meta-Llama-3-8B,0.8928571428571429,0.8163865546218487,0.8529112234365448,0.926890756302521,0.8823529411764706,0.888235294117647,0.8739495798319328,,,
+ Meta-Llama-3.1-8B,0.9052521008403361,0.8355042016806722,0.8689808363106925,0.9352941176470588,0.8932773109243698,0.9,0.892436974789916,,,
results/cross_lingual/zero_shot/cross_logiqa.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
  Qwen2-7B-Instruct,0.5673701298701299,0.477922077922078,0.5188189663543613,0.6590909090909091,0.6704545454545454,0.5340909090909091,0.5625,0.5340909090909091,0.5397727272727273,0.4715909090909091
+ Meta-Llama-3.1-8B-Instruct,0.43993506493506496,0.33425324675324675,0.37988102268160845,0.5113636363636364,0.45454545454545453,0.4772727272727273,0.48295454545454547,0.3977272727272727,0.39204545454545453,0.36363636363636365
  Qwen2-72B-Instruct,0.6753246753246753,0.6814935064935067,0.6783950674333673,0.75,0.8125,0.6647727272727273,0.6136363636363636,0.6420454545454546,0.6590909090909091,0.5852272727272727
  Meta-Llama-3-8B-Instruct,0.4115259740259741,0.34042207792207796,0.3726122484532397,0.48863636363636365,0.4659090909090909,0.42613636363636365,0.4034090909090909,0.4034090909090909,0.36363636363636365,0.32954545454545453
  Meta-Llama-3-70B-Instruct,0.6290584415584416,0.6181818181818182,0.6235727047409828,0.6988636363636364,0.6875,0.6420454545454546,0.6193181818181818,0.6022727272727273,0.6136363636363636,0.5397727272727273
results/cross_lingual/zero_shot/cross_mmlu.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
  Qwen2-7B-Instruct,0.6495238095238095,0.529714285714286,0.5835327779462245,0.74,0.6733333333333333,0.7,0.6,0.6533333333333333,0.6333333333333333,0.5466666666666666
+ Meta-Llama-3.1-8B-Instruct,0.5771428571428572,0.47047619047619055,0.5183792207297393,0.6933333333333334,0.5333333333333333,0.6266666666666667,0.54,0.54,0.54,0.5666666666666667
  Qwen2-72B-Instruct,0.7714285714285715,0.7765714285714286,0.773991456997936,0.8,0.78,0.7866666666666666,0.7333333333333333,0.76,0.78,0.76
  Meta-Llama-3-8B-Instruct,0.5276190476190475,0.3792380952380953,0.4412894449458876,0.62,0.5066666666666667,0.5066666666666667,0.5466666666666666,0.49333333333333335,0.52,0.5
  Meta-Llama-3-70B-Instruct,0.7542857142857143,0.7228571428571428,0.7382370820168919,0.7933333333333333,0.74,0.7666666666666667,0.7466666666666667,0.7666666666666667,0.72,0.7466666666666667
results/cross_lingual/zero_shot/cross_xquad.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
  Qwen2-7B-Instruct,0.940546218487395,0.9016806722689076,0.9207034712119446,0.9521008403361344,0.9352941176470588,0.9445378151260504,0.9302521008403362,,,
+ Meta-Llama-3.1-8B-Instruct,0.9340336134453782,0.8831932773109243,0.9079022683718587,0.9369747899159664,0.9302521008403362,0.946218487394958,0.9226890756302522,,,
  Qwen2-72B-Instruct,0.9611344537815126,0.9506302521008403,0.9558534951942531,0.9638655462184874,0.9554621848739496,0.9613445378151261,0.9638655462184874,,,
  Meta-Llama-3-8B-Instruct,0.8756302521008403,0.7699579831932772,0.8194012188828194,0.8815126050420168,0.8420168067226891,0.9092436974789916,0.8697478991596639,,,
  Meta-Llama-3-70B-Instruct,0.9586134453781513,0.9434873949579832,0.9509902767764395,0.9705882352941176,0.9394957983193277,0.9596638655462185,0.9647058823529412,,,
results/cultural_reasoning/few_shot/cn_eval.csv CHANGED
@@ -1,2 +1,4 @@
  Model,Accuracy
+ Meta-Llama-3-70B,0.6
  Meta-Llama-3-8B,0.41904761904761906
+ Meta-Llama-3.1-8B,0.4857142857142857
results/cultural_reasoning/few_shot/ph_eval.csv CHANGED
@@ -1,2 +1,4 @@
  Model,Accuracy
+ Meta-Llama-3-70B,0.68
  Meta-Llama-3-8B,0.54
+ Meta-Llama-3.1-8B,0.51
results/cultural_reasoning/few_shot/sg_eval.csv CHANGED
@@ -1,3 +1,4 @@
  Model,Accuracy
  Meta-Llama-3-70B,0.7572815533980582
  Meta-Llama-3-8B,0.6407766990291263
+ Meta-Llama-3.1-8B,0.6116504854368932
results/cultural_reasoning/few_shot/us_eval.csv CHANGED
@@ -1,2 +1,4 @@
  Model,Accuracy
+ Meta-Llama-3-70B,0.8785046728971962
  Meta-Llama-3-8B,0.6915887850467289
+ Meta-Llama-3.1-8B,0.6728971962616822
results/cultural_reasoning/zero_shot/cn_eval.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.8095238095238095
+ Meta-Llama-3.1-8B-Instruct,0.42857142857142855
  Qwen2-72B-Instruct,0.8571428571428571
  Meta-Llama-3-8B-Instruct,0.37142857142857144
  Meta-Llama-3-70B-Instruct,0.5142857142857142
results/cultural_reasoning/zero_shot/ph_eval.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.51
+ Meta-Llama-3.1-8B-Instruct,0.56
  Qwen2-72B-Instruct,0.63
  Meta-Llama-3-8B-Instruct,0.54
  Meta-Llama-3-70B-Instruct,0.63
results/cultural_reasoning/zero_shot/sg_eval.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.6699029126213593
+ Meta-Llama-3.1-8B-Instruct,0.6019417475728155
  Qwen2-72B-Instruct,0.7378640776699029
  Meta-Llama-3-8B-Instruct,0.5922330097087378
  Meta-Llama-3-70B-Instruct,0.7184466019417476
results/cultural_reasoning/zero_shot/us_eval.csv CHANGED
@@ -1,5 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.719626168224299
+ Meta-Llama-3.1-8B-Instruct,0.6448598130841121
  Qwen2-72B-Instruct,0.8504672897196262
  Meta-Llama-3-8B-Instruct,0.6448598130841121
  Meta-Llama-3-70B-Instruct,0.8691588785046729
results/dialogue/few_shot/dream.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.8250857422831945
+ Meta-Llama-3.1-8B,0.8530132288094071
results/dialogue/zero_shot/dialogsum.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
  Qwen2-7B-Instruct,0.20907406151501814,0.3054588156947843,0.09317750879187732,0.22858586005839285
+ Meta-Llama-3.1-8B-Instruct,0.25775524210830225,0.361264483769506,0.1319601664036931,0.28004107615170776
+ Qwen2-72B-Instruct,0.21903635116217549,0.31670807543803475,0.10250931612356096,0.23789166192493072
  Meta-Llama-3-8B-Instruct,0.23748034560689027,0.33656243928704743,0.11826169056076426,0.2576169069728591
  Meta-Llama-3-70B-Instruct,0.2557065499979308,0.36058417323628,0.12758087337786866,0.2789546033796438
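In the dialogue summarization files, the Average column matches the arithmetic mean of ROUGE-1, ROUGE-2, and ROUGE-L; the added Meta-Llama-3.1-8B-Instruct row above checks out numerically (the column semantics are inferred from the data, not stated on this page):

    # Values copied from the Meta-Llama-3.1-8B-Instruct row above.
    rouge_1 = 0.361264483769506
    rouge_2 = 0.1319601664036931
    rouge_l = 0.28004107615170776
    print((rouge_1 + rouge_2 + rouge_l) / 3)  # ~0.25775524210830225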
results/dialogue/zero_shot/dream.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.9338559529642332
+ Meta-Llama-3.1-8B-Instruct,0.8858402743753062
+ Qwen2-72B-Instruct,0.9608035276825085
  Meta-Llama-3-8B-Instruct,0.5433610975012249
  Meta-Llama-3-70B-Instruct,0.9480646741793238
results/dialogue/zero_shot/samsum.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
  Qwen2-7B-Instruct,0.2609036529701212,0.36802926348230236,0.1319027531874975,0.28277894224056366
+ Meta-Llama-3.1-8B-Instruct,0.3002534894623792,0.41234119292969856,0.16596515741670248,0.3224541180407366
+ Qwen2-72B-Instruct,0.27953180135225114,0.3883786925058577,0.15246657328712612,0.2977501382637696
  Meta-Llama-3-8B-Instruct,0.2850232460296334,0.3945214081577773,0.15619034353394273,0.3043579863971803
  Meta-Llama-3-70B-Instruct,0.2893525314227379,0.4030746211134018,0.15236139065578,0.3126215824990321
results/emotion/few_shot/ind_emotion.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.4636363636363636
+ Meta-Llama-3.1-8B,0.5136363636363637
results/emotion/few_shot/sst2.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.6697247706422018
+ Meta-Llama-3.1-8B,0.8405963302752294
results/emotion/zero_shot/ind_emotion.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.6386363636363637
+ Meta-Llama-3.1-8B-Instruct,0.6295454545454545
+ Qwen2-72B-Instruct,0.675
  Meta-Llama-3-8B-Instruct,0.6522727272727272
  Meta-Llama-3-70B-Instruct,0.6909090909090909
results/emotion/zero_shot/sst2.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.9231651376146789
+ Meta-Llama-3.1-8B-Instruct,0.8784403669724771
+ Qwen2-72B-Instruct,0.9392201834862385
  Meta-Llama-3-8B-Instruct,0.8669724770642202
  Meta-Llama-3-70B-Instruct,0.9495412844036697
results/flores_translation/few_shot/ind2eng.csv CHANGED
@@ -1,2 +1,4 @@
  Model,BLEU
+ Meta-Llama-3-70B,0.4224655367668861
  Meta-Llama-3-8B,0.37760317005449096
+ Meta-Llama-3.1-8B,0.384092499597103
results/flores_translation/few_shot/vie2eng.csv CHANGED
@@ -1,2 +1,4 @@
  Model,BLEU
+ Meta-Llama-3-70B,0.3564689224836266
  Meta-Llama-3-8B,0.31157996445764863
+ Meta-Llama-3.1-8B,0.320367356810332
results/flores_translation/few_shot/zho2eng.csv CHANGED
@@ -1,2 +1,4 @@
  Model,BLEU
+ Meta-Llama-3-70B,0.27798501796196434
  Meta-Llama-3-8B,0.23710858530408072
+ Meta-Llama-3.1-8B,0.23777256698409086
results/flores_translation/few_shot/zsm2eng.csv CHANGED
@@ -1,2 +1,4 @@
  Model,BLEU
+ Meta-Llama-3-70B,0.44357168236218214
  Meta-Llama-3-8B,0.3908770132718593
+ Meta-Llama-3.1-8B,0.3893813156403672
results/flores_translation/zero_shot/ind2eng.csv CHANGED
@@ -1,4 +1,6 @@
  Model,BLEU
  Qwen2-7B-Instruct,0.2968667083646938
+ Meta-Llama-3.1-8B-Instruct,0.3851478947359834
+ Qwen2-72B-Instruct,0.40378146176265345
  Meta-Llama-3-8B-Instruct,0.33011728860318257
  Meta-Llama-3-70B-Instruct,0.3830092775167675
results/flores_translation/zero_shot/vie2eng.csv CHANGED
@@ -1,4 +1,6 @@
  Model,BLEU
  Qwen2-7B-Instruct,0.23571859325121644
+ Meta-Llama-3.1-8B-Instruct,0.3229889780558947
+ Qwen2-72B-Instruct,0.3326034551014482
  Meta-Llama-3-8B-Instruct,0.2637063711923046
  Meta-Llama-3-70B-Instruct,0.3230140263371192
results/flores_translation/zero_shot/zho2eng.csv CHANGED
@@ -1,4 +1,6 @@
  Model,BLEU
  Qwen2-7B-Instruct,0.21747115262398484
+ Meta-Llama-3.1-8B-Instruct,0.24469097639356438
+ Qwen2-72B-Instruct,0.24317967002278634
  Meta-Llama-3-8B-Instruct,0.19960072119079214
  Meta-Llama-3-70B-Instruct,0.24397819518058994
results/flores_translation/zero_shot/zsm2eng.csv CHANGED
@@ -1,4 +1,6 @@
  Model,BLEU
  Qwen2-7B-Instruct,0.27198336767927184
+ Meta-Llama-3.1-8B-Instruct,0.3833985449157327
+ Qwen2-72B-Instruct,0.40613262295280417
  Meta-Llama-3-8B-Instruct,0.31536374302282033
  Meta-Llama-3-70B-Instruct,0.3957287030176054
results/fundamental_nlp_tasks/few_shot/c3.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.7703814510097232
+ Meta-Llama-3.1-8B,0.8208676140613314
results/fundamental_nlp_tasks/few_shot/cola.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.6596356663470757
+ Meta-Llama-3.1-8B,0.6222435282837967
results/fundamental_nlp_tasks/few_shot/mnli.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.46174988547869905
results/fundamental_nlp_tasks/few_shot/mrpc.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.5906862745098039
results/fundamental_nlp_tasks/few_shot/ocnli.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.3935593220338983
+ Meta-Llama-3.1-8B,0.411864406779661
results/fundamental_nlp_tasks/few_shot/qnli.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.5059491122094087
results/fundamental_nlp_tasks/few_shot/qqp.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.551
results/fundamental_nlp_tasks/few_shot/rte.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.5487364620938628
results/fundamental_nlp_tasks/few_shot/wnli.csv CHANGED
@@ -1 +1,2 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.4647887323943662
results/fundamental_nlp_tasks/zero_shot/c3.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.9233358264771877
+ Meta-Llama-3.1-8B-Instruct,0.7984293193717278
+ Qwen2-72B-Instruct,0.9599850411368736
  Meta-Llama-3-8B-Instruct,0.8515332834704562
  Meta-Llama-3-70B-Instruct,0.9521316379955124
results/fundamental_nlp_tasks/zero_shot/cola.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.7861936720997124
+ Meta-Llama-3.1-8B-Instruct,0.7046979865771812
+ Qwen2-72B-Instruct,0.8360498561840843
  Meta-Llama-3-8B-Instruct,0.6481303930968361
  Meta-Llama-3-70B-Instruct,0.835091083413231
results/fundamental_nlp_tasks/zero_shot/mnli.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.7341578867002596
+ Meta-Llama-3.1-8B-Instruct,0.4603756298671553
+ Qwen2-72B-Instruct,0.7979335267470861
  Meta-Llama-3-8B-Instruct,0.5296991907161399
  Meta-Llama-3-70B-Instruct,0.6709421285692472
results/fundamental_nlp_tasks/zero_shot/mrpc.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.7745098039215687
+ Meta-Llama-3.1-8B-Instruct,0.6740196078431373
+ Qwen2-72B-Instruct,0.7941176470588235
  Meta-Llama-3-8B-Instruct,0.6764705882352942
  Meta-Llama-3-70B-Instruct,0.7598039215686274
results/fundamental_nlp_tasks/zero_shot/ocnli.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.6474576271186441
+ Meta-Llama-3.1-8B-Instruct,0.42135593220338985
+ Qwen2-72B-Instruct,0.7874576271186441
  Meta-Llama-3-8B-Instruct,0.4322033898305085
  Meta-Llama-3-70B-Instruct,0.5928813559322034
results/fundamental_nlp_tasks/zero_shot/qnli.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.8169503935566539
+ Meta-Llama-3.1-8B-Instruct,0.6027823540179389
+ Qwen2-72B-Instruct,0.8894380377082189
  Meta-Llama-3-8B-Instruct,0.5689181768259198
  Meta-Llama-3-70B-Instruct,0.876807614863628
results/fundamental_nlp_tasks/zero_shot/qqp.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.7771209497897601
+ Meta-Llama-3.1-8B-Instruct,0.5058125154588177
+ Qwen2-72B-Instruct,0.7992332426416028
  Meta-Llama-3-8B-Instruct,0.5512490724709375
  Meta-Llama-3-70B-Instruct,0.7876082117239673
results/fundamental_nlp_tasks/zero_shot/rte.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.8411552346570397
+ Meta-Llama-3.1-8B-Instruct,0.6895306859205776
+ Qwen2-72B-Instruct,0.8592057761732852
  Meta-Llama-3-8B-Instruct,0.6028880866425993
  Meta-Llama-3-70B-Instruct,0.8086642599277978
results/fundamental_nlp_tasks/zero_shot/wnli.csv CHANGED
@@ -1,4 +1,6 @@
  Model,Accuracy
  Qwen2-7B-Instruct,0.647887323943662
+ Meta-Llama-3.1-8B-Instruct,0.4507042253521127
+ Qwen2-72B-Instruct,0.9014084507042254
  Meta-Llama-3-8B-Instruct,0.4507042253521127
  Meta-Llama-3-70B-Instruct,0.7887323943661971
results/general_reasoning/few_shot/c_eval.csv CHANGED
@@ -1 +1,4 @@
  Model,Accuracy
+ Meta-Llama-3-70B,0.6257783312577833
+ Meta-Llama-3-8B,0.43773349937733497
+ Meta-Llama-3.1-8B,0.44458281444582815
results/general_reasoning/few_shot/cmmlu.csv CHANGED
@@ -1 +1,3 @@
  Model,Accuracy
+ Meta-Llama-3-8B,0.4308409601105163
+ Meta-Llama-3.1-8B,0.4555344500086341