zhuohan-7 committed on
Commit dc83d99 · verified · 1 Parent(s): 729da04

Upload folder using huggingface_hub

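The commit message indicates the results folder was pushed with huggingface_hub. A minimal sketch of such an upload follows, assuming a dataset repo; the `folder_path` and `repo_id` values are placeholders, not taken from this commit:

```python
# Minimal sketch of an upload like this commit's; folder_path and repo_id
# are placeholders, not taken from the commit itself.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="results",                 # local results folder (assumed)
    repo_id="your-org/your-eval-results",  # hypothetical target repo
    repo_type="dataset",                   # assumed; could also be a model/space repo
    commit_message="Upload folder using huggingface_hub",
)
```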
results/cross_lingual/few_shot/cross_logiqa.csv CHANGED
@@ -1,5 +1 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Meta-Llama-3-70B,0.6152597402597404,0.49480519480519464,0.5484971301967684,0.7272727272727273,0.6534090909090909,0.625,0.5681818181818182,0.6136363636363636,0.5795454545454546,0.5397727272727273
- Meta-Llama-3-8B,0.44967532467532456,0.2623376623376623,0.33136129711503204,0.5227272727272727,0.4431818181818182,0.44886363636363635,0.44886363636363635,0.3693181818181818,0.4602272727272727,0.45454545454545453
- llama3-8b-cpt-sea-lionv2-base,0.43993506493506496,0.27012987012987016,0.3347288285088485,0.5170454545454546,0.4375,0.4431818181818182,0.4772727272727273,0.4090909090909091,0.4659090909090909,0.32954545454545453
- Meta-Llama-3.1-8B,0.46266233766233766,0.277435064935065,0.34686989908229837,0.5284090909090909,0.5,0.4375,0.4772727272727273,0.4318181818181818,0.4431818181818182,0.42045454545454547
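A note on the derived columns in these cross-lingual files: the AC3 values are consistent with the harmonic mean of the Accuracy and Cross-Lingual Consistency columns, and Accuracy with the plain average of the per-language scores. A minimal sketch, checked against the Meta-Llama-3-70B row above (the helper name is illustrative, not from the repo):

```python
# Sketch: AC3 in these CSVs matches the harmonic mean of Accuracy and
# Cross-Lingual Consistency, and Accuracy matches the average of the
# per-language columns. Function name is illustrative only.

def ac3(accuracy: float, consistency: float) -> float:
    """Harmonic mean of accuracy and cross-lingual consistency."""
    return 2 * accuracy * consistency / (accuracy + consistency)

# Meta-Llama-3-70B row from cross_logiqa.csv above
acc, clc = 0.6152597402597404, 0.49480519480519464
print(ac3(acc, clc))  # ~0.54849713..., matching the AC3 column

langs = [0.7272727272727273, 0.6534090909090909, 0.625, 0.5681818181818182,
         0.6136363636363636, 0.5795454545454546, 0.5397727272727273]
print(sum(langs) / len(langs))  # ~0.61525974..., matching the Accuracy column
```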
results/cross_lingual/few_shot/cross_mmlu.csv CHANGED
@@ -1,5 +1 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Meta-Llama-3-70B,0.7552380952380952,0.6674285714285715,0.708623453080271,0.8066666666666666,0.7266666666666667,0.7866666666666666,0.7533333333333333,0.7733333333333333,0.72,0.72
- Meta-Llama-3-8B,0.5295238095238096,0.31923809523809527,0.3983311959862401,0.6266666666666667,0.5466666666666666,0.56,0.4866666666666667,0.5266666666666666,0.5,0.46
- llama3-8b-cpt-sea-lionv2-base,0.5228571428571429,0.32704761904761903,0.402396106759339,0.6533333333333333,0.44,0.5066666666666667,0.47333333333333333,0.58,0.5466666666666666,0.46
- Meta-Llama-3.1-8B,0.5342857142857141,0.2960000000000001,0.3809497590731823,0.6733333333333333,0.5533333333333333,0.5133333333333333,0.47333333333333333,0.5133333333333333,0.5,0.5133333333333333
results/cross_lingual/few_shot/cross_xquad.csv CHANGED
@@ -1,5 +1 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Meta-Llama-3-70B,0.9596638655462185,0.9359243697478992,0.9476454662047799,0.9697478991596639,0.9504201680672268,0.957983193277311,0.9605042016806723,,,
- Meta-Llama-3-8B,0.8928571428571429,0.8163865546218487,0.8529112234365448,0.926890756302521,0.8823529411764706,0.888235294117647,0.8739495798319328,,,
- llama3-8b-cpt-sea-lionv2-base,0.9029411764705881,0.842016806722689,0.8714154189951169,0.9218487394957983,0.8815126050420168,0.9058823529411765,0.9025210084033614,,,
- Meta-Llama-3.1-8B,0.9052521008403361,0.8355042016806722,0.8689808363106925,0.9352941176470588,0.8932773109243698,0.9,0.892436974789916,,,
results/cross_lingual/zero_shot/cross_logiqa.csv CHANGED
@@ -1,10 +1,7 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Qwen2-7B-Instruct,0.5673701298701299,0.477922077922078,0.5188189663543613,0.6590909090909091,0.6704545454545454,0.5340909090909091,0.5625,0.5340909090909091,0.5397727272727273,0.4715909090909091
- Meta-Llama-3.1-8B-Instruct,0.43993506493506496,0.33425324675324675,0.37988102268160845,0.5113636363636364,0.45454545454545453,0.4772727272727273,0.48295454545454547,0.3977272727272727,0.39204545454545453,0.36363636363636365
- Qwen2-72B-Instruct,0.6753246753246753,0.6814935064935067,0.6783950674333673,0.75,0.8125,0.6647727272727273,0.6136363636363636,0.6420454545454546,0.6590909090909091,0.5852272727272727
- Meta-Llama-3-8B-Instruct,0.4115259740259741,0.34042207792207796,0.3726122484532397,0.48863636363636365,0.4659090909090909,0.42613636363636365,0.4034090909090909,0.4034090909090909,0.36363636363636365,0.32954545454545453
- SeaLLMs-v3-7B-Chat,0.5633116883116883,0.5176948051948052,0.5395407640365807,0.6079545454545454,0.7045454545454546,0.5681818181818182,0.5511363636363636,0.5340909090909091,0.5170454545454546,0.4602272727272727
- gemma-2-9b-it,0.6193181818181818,0.5688311688311687,0.5930020245684557,0.6818181818181818,0.6590909090909091,0.5625,0.6193181818181818,0.5909090909090909,0.6306818181818182,0.5909090909090909
- Meta-Llama-3-70B-Instruct,0.6290584415584416,0.6181818181818182,0.6235727047409828,0.6988636363636364,0.6875,0.6420454545454546,0.6193181818181818,0.6022727272727273,0.6136363636363636,0.5397727272727273
- gemma-2-2b-it,0.48214285714285715,0.44772727272727286,0.4642981843076105,0.5625,0.5113636363636364,0.48863636363636365,0.5,0.4431818181818182,0.4659090909090909,0.4034090909090909
- llama3-8b-cpt-sea-lionv2-instruct,0.43831168831168826,0.38831168831168833,0.41179951229957745,0.4943181818181818,0.48295454545454547,0.48295454545454547,0.4318181818181818,0.4147727272727273,0.38636363636363635,0.375
+ Meta-Llama-3.1-8B-Instruct,0.4472402597402597,0.43717532467532455,0.44215052105151864,0.5227272727272727,0.4602272727272727,0.4715909090909091,0.4715909090909091,0.4147727272727273,0.3977272727272727,0.39204545454545453
+ Meta-Llama-3.1-70B-Instruct,0.6566558441558442,0.598051948051948,0.6259852839118454,0.7443181818181818,0.7215909090909091,0.6647727272727273,0.6534090909090909,0.6193181818181818,0.625,0.5681818181818182
+ gemma-2-9b-it,0.6185064935064934,0.5592532467532466,0.5873893507784849,0.6647727272727273,0.6761363636363636,0.5625,0.6193181818181818,0.5795454545454546,0.6420454545454546,0.5852272727272727
+ Meta-Llama-3-70B-Instruct,0.6306818181818182,0.6186688311688312,0.6246175698800746,0.7102272727272727,0.6875,0.6420454545454546,0.6193181818181818,0.6022727272727273,0.6136363636363636,0.5397727272727273
+ sg_llama3_70b_inst,0.6217532467532468,0.5629870129870129,0.590912649920049,0.7272727272727273,0.6590909090909091,0.6477272727272727,0.6079545454545454,0.6136363636363636,0.5795454545454546,0.5170454545454546
+ GPT4o_0513,0.7159090909090909,0.6941558441558444,0.7048646724637749,0.7613636363636364,0.7670454545454546,0.6988636363636364,0.6988636363636364,0.7045454545454546,0.6761363636363636,0.7045454545454546
results/cross_lingual/zero_shot/cross_mmlu.csv CHANGED
@@ -1,10 +1,7 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Qwen2-7B-Instruct,0.6495238095238095,0.529714285714286,0.5835327779462245,0.74,0.6733333333333333,0.7,0.6,0.6533333333333333,0.6333333333333333,0.5466666666666666
- Meta-Llama-3.1-8B-Instruct,0.5771428571428572,0.47047619047619055,0.5183792207297393,0.6933333333333334,0.5333333333333333,0.6266666666666667,0.54,0.54,0.54,0.5666666666666667
- Qwen2-72B-Instruct,0.7714285714285715,0.7765714285714286,0.773991456997936,0.8,0.78,0.7866666666666666,0.7333333333333333,0.76,0.78,0.76
- Meta-Llama-3-8B-Instruct,0.5276190476190475,0.3792380952380953,0.4412894449458876,0.62,0.5066666666666667,0.5066666666666667,0.5466666666666666,0.49333333333333335,0.52,0.5
- SeaLLMs-v3-7B-Chat,0.6580952380952381,0.6253333333333335,0.641296131344116,0.7466666666666667,0.6933333333333334,0.6933333333333334,0.6466666666666666,0.66,0.58,0.5866666666666667
- gemma-2-9b-it,0.7114285714285715,0.7201904761904762,0.7157827111185566,0.76,0.7333333333333333,0.7,0.66,0.7066666666666667,0.6933333333333334,0.7266666666666667
- Meta-Llama-3-70B-Instruct,0.7542857142857143,0.7228571428571428,0.7382370820168919,0.7933333333333333,0.74,0.7666666666666667,0.7466666666666667,0.7666666666666667,0.72,0.7466666666666667
- gemma-2-2b-it,0.5752380952380953,0.5333333333333332,0.5534936998355239,0.6866666666666666,0.5866666666666667,0.6066666666666667,0.5466666666666666,0.5466666666666666,0.5133333333333333,0.54
- llama3-8b-cpt-sea-lionv2-instruct,0.5466666666666667,0.4720000000000001,0.5065968585890122,0.66,0.49333333333333335,0.5466666666666666,0.5866666666666667,0.5666666666666667,0.5066666666666667,0.4666666666666667
+ Meta-Llama-3.1-8B-Instruct,0.5619047619047618,0.5020952380952383,0.5303189947159841,0.66,0.5266666666666666,0.5733333333333334,0.5266666666666666,0.5533333333333333,0.5533333333333333,0.54
+ Meta-Llama-3.1-70B-Instruct,0.7638095238095238,0.7716190476190474,0.7676944251955988,0.8,0.74,0.7666666666666667,0.7666666666666667,0.76,0.7666666666666667,0.7466666666666667
+ gemma-2-9b-it,0.7161904761904762,0.7163809523809525,0.7162857015727578,0.7733333333333333,0.74,0.7066666666666667,0.64,0.7266666666666667,0.6933333333333334,0.7333333333333333
+ Meta-Llama-3-70B-Instruct,0.758095238095238,0.7316190476190477,0.7446218665971989,0.7933333333333333,0.7466666666666667,0.7733333333333333,0.7466666666666667,0.7733333333333333,0.7333333333333333,0.74
+ sg_llama3_70b_inst,0.7342857142857142,0.7079999999999999,0.7209033280007295,0.82,0.6866666666666666,0.7333333333333333,0.6933333333333334,0.78,0.7266666666666667,0.7
+ GPT4o_0513,0.8038095238095239,0.8506666666666668,0.8265745643832277,0.8266666666666667,0.7933333333333333,0.8,0.7666666666666667,0.7933333333333333,0.8266666666666667,0.82
results/cross_lingual/zero_shot/cross_xquad.csv CHANGED
@@ -1,10 +1,7 @@
  Model,Accuracy,Cross-Lingual Consistency,AC3,English,Chinese,Spanish,Vietnamese,Indonesian,Malay,Filipino
- Qwen2-7B-Instruct,0.940546218487395,0.9016806722689076,0.9207034712119446,0.9521008403361344,0.9352941176470588,0.9445378151260504,0.9302521008403362,,,
- Meta-Llama-3.1-8B-Instruct,0.9340336134453782,0.8831932773109243,0.9079022683718587,0.9369747899159664,0.9302521008403362,0.946218487394958,0.9226890756302522,,,
- Qwen2-72B-Instruct,0.9611344537815126,0.9506302521008403,0.9558534951942531,0.9638655462184874,0.9554621848739496,0.9613445378151261,0.9638655462184874,,,
- Meta-Llama-3-8B-Instruct,0.8756302521008403,0.7699579831932772,0.8194012188828194,0.8815126050420168,0.8420168067226891,0.9092436974789916,0.8697478991596639,,,
- SeaLLMs-v3-7B-Chat,0.9394957983193277,0.9172268907563025,0.9282278015934072,0.9512605042016806,0.938655462184874,0.938655462184874,0.9294117647058824,,,
- gemma-2-9b-it,0.9571428571428572,0.9352941176470588,0.9460923622945893,0.9663865546218487,0.9411764705882353,0.9613445378151261,0.9596638655462185,,,
- Meta-Llama-3-70B-Instruct,0.9586134453781513,0.9434873949579832,0.9509902767764395,0.9705882352941176,0.9394957983193277,0.9596638655462185,0.9647058823529412,,,
- gemma-2-2b-it,0.9149159663865546,0.8632352941176471,0.888324599638689,0.9302521008403362,0.9016806722689076,0.9184873949579831,0.9092436974789916,,,
- llama3-8b-cpt-sea-lionv2-instruct,0.8930672268907562,0.8262605042016806,0.8583659343003551,0.9142857142857143,0.8798319327731092,0.8890756302521008,0.8890756302521008,,,
+ Meta-Llama-3.1-8B-Instruct,0.9287815126050419,0.8867647058823529,0.9072869161050563,0.9420168067226891,0.9193277310924369,0.9361344537815126,0.9176470588235294,,,
+ Meta-Llama-3.1-70B-Instruct,0.9615546218487395,0.9512605042016806,0.9563798632627071,0.9647058823529412,0.9512605042016806,0.9647058823529412,0.965546218487395,,,
+ gemma-2-9b-it,0.9567226890756303,0.9350840336134454,0.9457796088507574,0.9663865546218487,0.9411764705882353,0.9588235294117647,0.9605042016806723,,,
+ Meta-Llama-3-70B-Instruct,0.9592436974789916,0.9422268907563025,0.9506591499208973,0.9714285714285714,0.9403361344537815,0.9596638655462185,0.965546218487395,,,
+ sg_llama3_70b_inst,0.9552521008403361,0.9453781512605042,0.9502894779607259,0.9663865546218487,0.9436974789915966,0.957983193277311,0.9529411764705882,,,
+ GPT4o_0513,0.9605042016806723,0.951890756302521,0.9561780814209724,0.965546218487395,0.9537815126050421,0.9630252100840336,0.9596638655462185,,,
results/cultural_reasoning/few_shot/sg_eval.csv CHANGED
@@ -1,5 +1 @@
  Model,Accuracy
- Meta-Llama-3-70B,0.7572815533980582
- Meta-Llama-3-8B,0.6407766990291263
- llama3-8b-cpt-sea-lionv2-base,0.6310679611650486
- Meta-Llama-3.1-8B,0.6116504854368932
results/cultural_reasoning/few_shot/sg_eval_v1_cleaned.csv ADDED
@@ -0,0 +1 @@
+ Model,Accuracy
results/cultural_reasoning/few_shot/sg_eval_v2_mcq.csv ADDED
@@ -0,0 +1 @@
+ Model,Accuracy
results/cultural_reasoning/zero_shot/sg_eval.csv CHANGED
@@ -1,10 +1,7 @@
  Model,Accuracy
- Qwen2-7B-Instruct,0.6699029126213593
- Meta-Llama-3.1-8B-Instruct,0.6019417475728155
- Qwen2-72B-Instruct,0.7378640776699029
- Meta-Llama-3-8B-Instruct,0.5922330097087378
- SeaLLMs-v3-7B-Chat,0.6310679611650486
- gemma-2-9b-it,0.6893203883495146
- Meta-Llama-3-70B-Instruct,0.7184466019417476
- gemma-2-2b-it,0.5533980582524272
- llama3-8b-cpt-sea-lionv2-instruct,0.6019417475728155
+ Meta-Llama-3.1-8B-Instruct,0.5728155339805825
+ Meta-Llama-3.1-70B-Instruct,0.7184466019417476
+ gemma-2-9b-it,0.6699029126213593
+ Meta-Llama-3-70B-Instruct,0.7087378640776699
+ sg_llama3_70b_inst,0.6699029126213593
+ GPT4o_0513,0.8446601941747572
results/cultural_reasoning/zero_shot/sg_eval_v1_cleaned.csv ADDED
@@ -0,0 +1,7 @@
+ Model,Accuracy
+ Meta-Llama-3.1-8B-Instruct,0.5294117647058824
+ Meta-Llama-3.1-70B-Instruct,0.6617647058823529
+ gemma-2-9b-it,0.6029411764705882
+ Meta-Llama-3-70B-Instruct,0.6617647058823529
+ sg_llama3_70b_inst,0.6176470588235294
+ GPT4o_0513,0.8088235294117647
results/cultural_reasoning/zero_shot/sg_eval_v2_mcq.csv ADDED
@@ -0,0 +1,3 @@
+ Model,Accuracy
+ Meta-Llama-3-70B-Instruct,0.8381818181818181
+ sg_llama3_70b_inst,0.8436363636363636
results/dialogue/zero_shot/dialogsum.csv CHANGED
@@ -1,10 +1,5 @@
  Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
- Qwen2-7B-Instruct,0.20907406151501814,0.3054588156947843,0.09317750879187732,0.22858586005839285
- Meta-Llama-3.1-8B-Instruct,0.25775524210830225,0.361264483769506,0.1319601664036931,0.28004107615170776
- Qwen2-72B-Instruct,0.21903635116217549,0.31670807543803475,0.10250931612356096,0.23789166192493072
- Meta-Llama-3-8B-Instruct,0.23748034560689027,0.33656243928704743,0.11826169056076426,0.2576169069728591
- SeaLLMs-v3-7B-Chat,0.24723061042117522,0.3515679169380843,0.12081049484108507,0.2693134194843562
- gemma-2-9b-it,0.2587338648607764,0.3658237880022337,0.12722373001686862,0.2831540765632268
+ Meta-Llama-3.1-8B-Instruct,0.24990743661648132,0.3515557454075673,0.12563120411564133,0.2725353603262354
  Meta-Llama-3-70B-Instruct,0.2557065499979308,0.36058417323628,0.12758087337786866,0.2789546033796438
- gemma-2-2b-it,0.26123184071161726,0.3683777522574926,0.12793735218483035,0.28738041769252887
- llama3-8b-cpt-sea-lionv2-instruct,0.2531827068435159,0.35516222681696785,0.12864609875605545,0.2757397949575244
+ sg_llama3_70b_inst,0.26633840691332344,0.3692028513115729,0.1412505883866801,0.2885617810417173
+ GPT4o_0513,0.2375730297294346,0.3364674648846549,0.11718194476069822,0.25906967954295057
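For the dialogue files, the Average column is consistent with the unweighted mean of the three ROUGE scores. A quick sketch, checked against the Meta-Llama-3-70B-Instruct context row above:

```python
# Sketch: Average appears to be the plain arithmetic mean of ROUGE-1/2/L.
# Values taken from the Meta-Llama-3-70B-Instruct row of dialogsum.csv above.
rouge_1 = 0.36058417323628
rouge_2 = 0.12758087337786866
rouge_l = 0.2789546033796438
print((rouge_1 + rouge_2 + rouge_l) / 3)  # ~0.25570655..., matching the Average column
```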
results/dialogue/zero_shot/samsum.csv CHANGED
@@ -1,10 +1,5 @@
  Model,Average,ROUGE-1,ROUGE-2,ROUGE-L
- Qwen2-7B-Instruct,0.2609036529701212,0.36802926348230236,0.1319027531874975,0.28277894224056366
- Meta-Llama-3.1-8B-Instruct,0.3002534894623792,0.41234119292969856,0.16596515741670248,0.3224541180407366
- Qwen2-72B-Instruct,0.27953180135225114,0.3883786925058577,0.15246657328712612,0.2977501382637696
- Meta-Llama-3-8B-Instruct,0.2850232460296334,0.3945214081577773,0.15619034353394273,0.3043579863971803
- SeaLLMs-v3-7B-Chat,0.2947730352305254,0.40661343212311085,0.16241730068430632,0.31528837288415906
- gemma-2-9b-it,0.30920311453647803,0.4269492679851157,0.16650133263007386,0.33415874299424464
+ Meta-Llama-3.1-8B-Instruct,0.2891505262763006,0.4001228010515775,0.15677431231732958,0.31055446545999466
  Meta-Llama-3-70B-Instruct,0.2893525314227379,0.4030746211134018,0.15236139065578,0.3126215824990321
- gemma-2-2b-it,0.3067902178200617,0.4277497131478937,0.1609158209467132,0.3317051193655783
- llama3-8b-cpt-sea-lionv2-instruct,0.29924948830821335,0.40828658585731714,0.16733998585334992,0.32212189321397305
+ sg_llama3_70b_inst,0.3146051103643872,0.4271361513564755,0.18238925099430264,0.33428992874238356
+ GPT4o_0513,0.27736679291505306,0.386750207633093,0.14889081847621596,0.2964593526358502
results/flores_translation/few_shot/ind2eng.csv CHANGED
@@ -1,5 +1 @@
  Model,BLEU
- Meta-Llama-3-70B,0.4224655367668861
- Meta-Llama-3-8B,0.37760317005449096
- llama3-8b-cpt-sea-lionv2-base,0.37662180389435995
- Meta-Llama-3.1-8B,0.384092499597103
results/flores_translation/few_shot/vie2eng.csv CHANGED
@@ -1,5 +1 @@
  Model,BLEU
- Meta-Llama-3-70B,0.3564689224836266
- Meta-Llama-3-8B,0.31157996445764863
- llama3-8b-cpt-sea-lionv2-base,0.30608365217733097
- Meta-Llama-3.1-8B,0.320367356810332
results/flores_translation/few_shot/zho2eng.csv CHANGED
@@ -1,5 +1 @@
  Model,BLEU
- Meta-Llama-3-70B,0.27798501796196434
- Meta-Llama-3-8B,0.23710858530408072
- llama3-8b-cpt-sea-lionv2-base,0.22831898923969038
- Meta-Llama-3.1-8B,0.23777256698409086
results/flores_translation/few_shot/zsm2eng.csv CHANGED
@@ -1,5 +1 @@
  Model,BLEU
- Meta-Llama-3-70B,0.44357168236218214
- Meta-Llama-3-8B,0.3908770132718593
- llama3-8b-cpt-sea-lionv2-base,0.37668373435658764
- Meta-Llama-3.1-8B,0.3893813156403672
results/flores_translation/zero_shot/ind2eng.csv CHANGED
@@ -1,10 +1,7 @@
  Model,BLEU
- Qwen2-7B-Instruct,0.2968667083646938
- Meta-Llama-3.1-8B-Instruct,0.3851478947359834
- Qwen2-72B-Instruct,0.40378146176265345
- Meta-Llama-3-8B-Instruct,0.33011728860318257
- SeaLLMs-v3-7B-Chat,0.3642282499148727
- gemma-2-9b-it,0.4115273387213549
+ Meta-Llama-3.1-8B-Instruct,0.3765752579792989
+ Meta-Llama-3.1-70B-Instruct,0.43366494500251235
+ gemma-2-9b-it,0.40786563079141763
  Meta-Llama-3-70B-Instruct,0.3830092775167675
- gemma-2-2b-it,0.3496340692126605
- llama3-8b-cpt-sea-lionv2-instruct,0.39322992478935465
+ sg_llama3_70b_inst,0.4086440304524362
+ GPT4o_0513,0.42589589086974855
results/flores_translation/zero_shot/vie2eng.csv CHANGED
@@ -1,10 +1,7 @@
  Model,BLEU
- Qwen2-7B-Instruct,0.23571859325121644
- Meta-Llama-3.1-8B-Instruct,0.3229889780558947
- Qwen2-72B-Instruct,0.3326034551014482
- Meta-Llama-3-8B-Instruct,0.2637063711923046
- SeaLLMs-v3-7B-Chat,0.3073965938987496
- gemma-2-9b-it,0.33638205957057027
+ Meta-Llama-3.1-8B-Instruct,0.31019605539004524
+ Meta-Llama-3.1-70B-Instruct,0.37244508311079816
+ gemma-2-9b-it,0.3367700653885
  Meta-Llama-3-70B-Instruct,0.3230140263371192
- gemma-2-2b-it,0.2717960864611513
- llama3-8b-cpt-sea-lionv2-instruct,0.33210048239854756
+ sg_llama3_70b_inst,0.34258533717783785
+ GPT4o_0513,0.36219303373759176
results/flores_translation/zero_shot/zho2eng.csv CHANGED
@@ -1,10 +1,7 @@
  Model,BLEU
- Qwen2-7B-Instruct,0.21747115262398484
- Meta-Llama-3.1-8B-Instruct,0.24469097639356438
- Qwen2-72B-Instruct,0.24317967002278634
- Meta-Llama-3-8B-Instruct,0.19960072119079214
- SeaLLMs-v3-7B-Chat,0.25023469014968713
- gemma-2-9b-it,0.26747029920541504
+ Meta-Llama-3.1-8B-Instruct,0.23889886925287113
+ Meta-Llama-3.1-70B-Instruct,0.2832594176173152
+ gemma-2-9b-it,0.267527968123433
  Meta-Llama-3-70B-Instruct,0.24397819518058994
- gemma-2-2b-it,0.21203164253450932
- llama3-8b-cpt-sea-lionv2-instruct,0.24572934810342245
+ sg_llama3_70b_inst,0.26000707510414633
+ GPT4o_0513,0.27722306559544163
results/flores_translation/zero_shot/zsm2eng.csv CHANGED
@@ -1,10 +1,7 @@
  Model,BLEU
- Qwen2-7B-Instruct,0.27198336767927184
- Meta-Llama-3.1-8B-Instruct,0.3833985449157327
- Qwen2-72B-Instruct,0.40613262295280417
- Meta-Llama-3-8B-Instruct,0.31536374302282033
- SeaLLMs-v3-7B-Chat,0.3535493169696862
- gemma-2-9b-it,0.4248122066845582
+ Meta-Llama-3.1-8B-Instruct,0.3700921225177551
+ Meta-Llama-3.1-70B-Instruct,0.4462132282683508
+ gemma-2-9b-it,0.4234100394581857
  Meta-Llama-3-70B-Instruct,0.3957287030176054
- gemma-2-2b-it,0.33384917509056944
- llama3-8b-cpt-sea-lionv2-instruct,0.391912232406389
+ sg_llama3_70b_inst,0.4163761508073963
+ GPT4o_0513,0.451496635720668