kunato committed
Commit • 4a49498 • 0 Parent(s): init-result-13-aug-24
This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
- .gitattributes +58 -0
- LLM/LLaMa3-8b-WangchanX-sft-Full/results.json +1 -0
- LLM/Meta-Llama-3.1-70B-Instruct/results.json +1 -0
- LLM/Meta-Llama-3.1-8B-Instruct/results.json +1 -0
- LLM/Qwen2-72B-Instruct/results.json +1 -0
- LLM/Qwen2-7B-Instruct/results.json +1 -0
- LLM/Sailor-7B-Chat/results.json +1 -0
- LLM/SeaLLMs-v3-7B-Chat/results.json +1 -0
- LLM/gemini-1.5-flash-001/results.json +1 -0
- LLM/gpt-4o-2024-05-13/results.json +1 -0
- LLM/gpt-4o-mini-2024-07-18/results.json +1 -0
- LLM/llama-3-typhoon-v1.5-8b-instruct/results.json +1 -0
- LLM/llama-3-typhoon-v1.5x-70b-instruct/results.json +1 -0
- LLM/llama-3-typhoon-v1.5x-8b-instruct/results.json +1 -0
- LLM/llama3-8b-cpt-sea-lionv2-instruct/results.json +1 -0
- LLM/openthaigpt-1.0.0-70b-chat/results.json +1 -0
- LLM/openthaigpt-1.0.0-7b-chat/results.json +1 -0
- MC/LLaMa3-8b-WangchanX-sft-Full/results.json +1 -0
- MC/Meta-Llama-3.1-70B-Instruct/results.json +1 -0
- MC/Meta-Llama-3.1-8B-Instruct/results.json +1 -0
- MC/Qwen2-72B-Instruct/results.json +1 -0
- MC/Qwen2-7B-Instruct/results.json +1 -0
- MC/Sailor-7B-Chat/results.json +1 -0
- MC/SeaLLMs-v3-7B-Chat/results.json +1 -0
- MC/gpt-4o-2024-05-13/results.json +1 -0
- MC/gpt-4o-mini-2024-07-18/results.json +1 -0
- MC/llama-3-typhoon-v1.5-8b-instruct/results.json +1 -0
- MC/llama-3-typhoon-v1.5x-70b-instruct/results.json +1 -0
- MC/llama-3-typhoon-v1.5x-8b-instruct/results.json +1 -0
- MC/llama3-8b-cpt-sea-lionv2-instruct/results.json +1 -0
- MC/openthaigpt-1.0.0-70b-chat/results.json +1 -0
- MC/openthaigpt-1.0.0-7b-chat/results.json +1 -0
- NLG/LLaMa3-8b-WangchanX-sft-Full/results.json +1 -0
- NLG/Meta-Llama-3.1-70B-Instruct/results.json +1 -0
- NLG/Meta-Llama-3.1-8B-Instruct/results.json +1 -0
- NLG/Qwen2-72B-Instruct/results.json +1 -0
- NLG/Qwen2-7B-Instruct/results.json +1 -0
- NLG/Sailor-7B-Chat/results.json +1 -0
- NLG/SeaLLMs-v3-7B-Chat/results.json +1 -0
- NLG/gpt-4o-2024-05-13/results.json +1 -0
- NLG/gpt-4o-mini-2024-07-18/results.json +1 -0
- NLG/llama-3-typhoon-v1.5-8b-instruct/results.json +1 -0
- NLG/llama-3-typhoon-v1.5x-70b-instruct/results.json +1 -0
- NLG/llama-3-typhoon-v1.5x-8b-instruct/results.json +1 -0
- NLG/llama3-8b-cpt-sea-lionv2-instruct/results.json +1 -0
- NLG/openthaigpt-1.0.0-70b-chat/results.json +1 -0
- NLG/openthaigpt-1.0.0-7b-chat/results.json +1 -0
- NLU/LLaMa3-8b-WangchanX-sft-Full/results.json +1 -0
- NLU/Meta-Llama-3.1-70B-Instruct/results.json +1 -0
- NLU/Meta-Llama-3.1-8B-Instruct/results.json +1 -0
.gitattributes
ADDED
@@ -0,0 +1,58 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.lz4 filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
# Audio files - uncompressed
*.pcm filter=lfs diff=lfs merge=lfs -text
*.sam filter=lfs diff=lfs merge=lfs -text
*.raw filter=lfs diff=lfs merge=lfs -text
# Audio files - compressed
*.aac filter=lfs diff=lfs merge=lfs -text
*.flac filter=lfs diff=lfs merge=lfs -text
*.mp3 filter=lfs diff=lfs merge=lfs -text
*.ogg filter=lfs diff=lfs merge=lfs -text
*.wav filter=lfs diff=lfs merge=lfs -text
# Image files - uncompressed
*.bmp filter=lfs diff=lfs merge=lfs -text
*.gif filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.tiff filter=lfs diff=lfs merge=lfs -text
# Image files - compressed
*.jpg filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.webp filter=lfs diff=lfs merge=lfs -text
# Video files - compressed
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
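These attributes route large binary artifacts (archives, model weights, audio, image, and video files) through Git LFS, while small text files such as the results.json files added in this commit stay as ordinary git blobs. The sketch below is a rough, assumption-laden illustration of which paths the patterns capture: it approximates matching with Python's fnmatch on the basename, which does not fully reproduce gitattributes semantics (e.g. the saved_model/**/* glob), and the weights path in the usage lines is hypothetical.

```python
from fnmatch import fnmatch

# Subset of the patterns listed above; matching here is an fnmatch approximation,
# not the exact gitattributes rules.
LFS_PATTERNS = ["*.7z", "*.bin", "*.safetensors", "*.parquet", "*.png", "*.mp4"]

def is_lfs_tracked(path: str) -> bool:
    """Approximate check: does any LFS pattern match this path's basename?"""
    name = path.rsplit("/", 1)[-1]
    return any(fnmatch(name, pat) for pat in LFS_PATTERNS)

print(is_lfs_tracked("LLM/gpt-4o-2024-05-13/results.json"))        # False: stays in git
print(is_lfs_tracked("weights/model-00001-of-00002.safetensors"))  # True: stored via LFS
```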
LLM/LLaMa3-8b-WangchanX-sft-Full/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "airesearch/LLaMa3-8b-WangchanX-sft-Full"}, "results": {"Math": {"avg_rating": 2.55}, "Reasoning": {"avg_rating": 2.85}, "Extraction": {"avg_rating": 3.15}, "Roleplay": {"avg_rating": 3.8}, "Writing": {"avg_rating": 2.8}, "Social Science": {"avg_rating": 5.1}, "STEM": {"avg_rating": 4.055555555555555}, "Coding": {"avg_rating": 3.6818181818181817}, "Knowledge III": {"avg_rating": 3.6}}}
LLM/Meta-Llama-3.1-70B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, "results": {"Math": {"avg_rating": 6.35}, "Reasoning": {"avg_rating": 5.85}, "Extraction": {"avg_rating": 6.3}, "Roleplay": {"avg_rating": 6.6}, "Writing": {"avg_rating": 7.4}, "Social Science": {"avg_rating": 6.25}, "STEM": {"avg_rating": 6.5}, "Coding": {"avg_rating": 6.409090909090909}, "Knowledge III": {"avg_rating": 4.85}}}
LLM/Meta-Llama-3.1-8B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, "results": {"Math": {"avg_rating": 4.25}, "Reasoning": {"avg_rating": 4.2}, "Extraction": {"avg_rating": 4.55}, "Roleplay": {"avg_rating": 6.3}, "Writing": {"avg_rating": 6.25}, "Social Science": {"avg_rating": 7.4}, "STEM": {"avg_rating": 4.305555555555555}, "Coding": {"avg_rating": 4.909090909090909}, "Knowledge III": {"avg_rating": 3.45}}}
LLM/Qwen2-72B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-72B-Instruct"}, "results": {"Math": {"avg_rating": 7.25}, "Reasoning": {"avg_rating": 6.35}, "Extraction": {"avg_rating": 6.2}, "Roleplay": {"avg_rating": 7.5}, "Writing": {"avg_rating": 7.2}, "Social Science": {"avg_rating": 8.6}, "STEM": {"avg_rating": 6.833333333333333}, "Coding": {"avg_rating": 7.0}, "Knowledge III": {"avg_rating": 4.75}}}
LLM/Qwen2-7B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-7B-Instruct"}, "results": {"Math": {"avg_rating": 4.4}, "Reasoning": {"avg_rating": 4.25}, "Extraction": {"avg_rating": 5.25}, "Roleplay": {"avg_rating": 6.85}, "Writing": {"avg_rating": 6.15}, "Social Science": {"avg_rating": 6.85}, "STEM": {"avg_rating": 4.5}, "Coding": {"avg_rating": 6.2727272727272725}, "Knowledge III": {"avg_rating": 3.1}}}
LLM/Sailor-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "sail/Sailor-7B-Chat"}, "results": {"Math": {"avg_rating": 2.4}, "Reasoning": {"avg_rating": 3.65}, "Extraction": {"avg_rating": 2.65}, "Roleplay": {"avg_rating": 5.55}, "Writing": {"avg_rating": 5.4}, "Social Science": {"avg_rating": 5.45}, "STEM": {"avg_rating": 4.555555555555555}, "Coding": {"avg_rating": 3.5454545454545454}, "Knowledge III": {"avg_rating": 3.05}}}
LLM/SeaLLMs-v3-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "SeaLLMs/SeaLLMs-v3-7B-Chat"}, "results": {"Math": {"avg_rating": 4.85}, "Reasoning": {"avg_rating": 2.85}, "Extraction": {"avg_rating": 4.15}, "Roleplay": {"avg_rating": 6.3}, "Writing": {"avg_rating": 6.0}, "Social Science": {"avg_rating": 5.7}, "STEM": {"avg_rating": 5.5}, "Coding": {"avg_rating": 7.045454545454546}, "Knowledge III": {"avg_rating": 3.75}}}
LLM/gemini-1.5-flash-001/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gemini-1.5-flash-001"}, "results": {"Math": {"avg_rating": 7.7}, "Reasoning": {"avg_rating": 5.85}, "Extraction": {"avg_rating": 6.1}, "Roleplay": {"avg_rating": 7.85}, "Writing": {"avg_rating": 8.0}, "Social Science": {"avg_rating": 7.85}, "STEM": {"avg_rating": 7.888888888888889}, "Coding": {"avg_rating": 7.636363636363637}, "Knowledge III": {"avg_rating": 5.05}}}
LLM/gpt-4o-2024-05-13/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-2024-05-13"}, "results": {"Math": {"avg_rating": 8.5}, "Reasoning": {"avg_rating": 9.0}, "Extraction": {"avg_rating": 7.6}, "Roleplay": {"avg_rating": 7.9}, "Writing": {"avg_rating": 8.15}, "Social Science": {"avg_rating": 8.65}, "STEM": {"avg_rating": 7.861111111111111}, "Coding": {"avg_rating": 8.863636363636363}, "Knowledge III": {"avg_rating": 7.8}}}
LLM/gpt-4o-mini-2024-07-18/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-mini-2024-07-18"}, "results": {"Math": {"avg_rating": 7.5}, "Reasoning": {"avg_rating": 6.7}, "Extraction": {"avg_rating": 7.15}, "Roleplay": {"avg_rating": 7.8}, "Writing": {"avg_rating": 8.35}, "Social Science": {"avg_rating": 8.75}, "STEM": {"avg_rating": 7.75}, "Coding": {"avg_rating": 7.318181818181818}, "Knowledge III": {"avg_rating": 6.45}}}
LLM/llama-3-typhoon-v1.5-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5-8b-instruct"}, "results": {"Math": {"avg_rating": 3.05}, "Reasoning": {"avg_rating": 5.5}, "Extraction": {"avg_rating": 4.65}, "Roleplay": {"avg_rating": 6.1}, "Writing": {"avg_rating": 7.45}, "Social Science": {"avg_rating": 5.8}, "STEM": {"avg_rating": 4.833333333333333}, "Coding": {"avg_rating": 5.2272727272727275}, "Knowledge III": {"avg_rating": 3.8}}}
LLM/llama-3-typhoon-v1.5x-70b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-70b-instruct"}, "results": {"Math": {"avg_rating": 5.75}, "Reasoning": {"avg_rating": 6.2}, "Extraction": {"avg_rating": 6.0}, "Roleplay": {"avg_rating": 7.7}, "Writing": {"avg_rating": 7.8}, "Social Science": {"avg_rating": 8.4}, "STEM": {"avg_rating": 6.972222222222222}, "Coding": {"avg_rating": 7.0}, "Knowledge III": {"avg_rating": 4.95}}}
LLM/llama-3-typhoon-v1.5x-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-8b-instruct"}, "results": {"Math": {"avg_rating": 4.7}, "Reasoning": {"avg_rating": 4.55}, "Extraction": {"avg_rating": 4.65}, "Roleplay": {"avg_rating": 6.4}, "Writing": {"avg_rating": 6.65}, "Social Science": {"avg_rating": 7.5}, "STEM": {"avg_rating": 5.333333333333333}, "Coding": {"avg_rating": 5.090909090909091}, "Knowledge III": {"avg_rating": 3.95}}}
LLM/llama3-8b-cpt-sea-lionv2-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "aisingapore/llama3-8b-cpt-sea-lionv2-instruct"}, "results": {"Math": {"avg_rating": 3.15}, "Reasoning": {"avg_rating": 4.3}, "Extraction": {"avg_rating": 3.7}, "Roleplay": {"avg_rating": 6.5}, "Writing": {"avg_rating": 6.8}, "Social Science": {"avg_rating": 6.05}, "STEM": {"avg_rating": 5.333333333333333}, "Coding": {"avg_rating": 5.2272727272727275}, "Knowledge III": {"avg_rating": 4.6}}}
LLM/openthaigpt-1.0.0-70b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-70b-chat"}, "results": {"Math": {"avg_rating": 2.85}, "Reasoning": {"avg_rating": 4.4}, "Extraction": {"avg_rating": 3.45}, "Roleplay": {"avg_rating": 5.75}, "Writing": {"avg_rating": 4.9}, "Social Science": {"avg_rating": 5.55}, "STEM": {"avg_rating": 5.777777777777778}, "Coding": {"avg_rating": 3.5}, "Knowledge III": {"avg_rating": 4.1}}}
LLM/openthaigpt-1.0.0-7b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-7b-chat"}, "results": {"Math": {"avg_rating": 1.6}, "Reasoning": {"avg_rating": 2.55}, "Extraction": {"avg_rating": 1.75}, "Roleplay": {"avg_rating": 4.1}, "Writing": {"avg_rating": 4.1}, "Social Science": {"avg_rating": 3.0}, "STEM": {"avg_rating": 2.861111111111111}, "Coding": {"avg_rating": 2.3636363636363638}, "Knowledge III": {"avg_rating": 2.15}}}
MC/LLaMa3-8b-WangchanX-sft-Full/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "airesearch/LLaMa3-8b-WangchanX-sft-Full"}, "results": {"thaiexam_qa": {"accuracy": 0.3876106194690265}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.4100553505535055}}}
MC/Meta-Llama-3.1-70B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.6051660516605166}, "thaiexam_qa": {"accuracy": 0.5823008849557522}}}
MC/Meta-Llama-3.1-8B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, "results": {"thaiexam_qa": {"accuracy": 0.4389380530973451}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.4511070110701107}}}
MC/Qwen2-72B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-72B-Instruct"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.6305350553505535}, "thaiexam_qa": {"accuracy": 0.5823008849557522}}}
MC/Qwen2-7B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-7B-Instruct"}, "results": {"thaiexam_qa": {"accuracy": 0.4707964601769911}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.4898523985239852}}}
MC/Sailor-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "sail/Sailor-7B-Chat"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.4363468634686346}, "thaiexam_qa": {"accuracy": 0.4053097345132743}}}
MC/SeaLLMs-v3-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "SeaLLMs/SeaLLMs-v3-7B-Chat"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.5142988929889298}, "thaiexam_qa": {"accuracy": 0.5132743362831859}}}
MC/gpt-4o-2024-05-13/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-2024-05-13"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.6863468634686347}, "thaiexam_qa": {"accuracy": 0.6389380530973451}}}
MC/gpt-4o-mini-2024-07-18/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-mini-2024-07-18"}, "results": {"thaiexam_qa": {"accuracy": 0.5451327433628319}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.5784132841328413}}}
MC/llama-3-typhoon-v1.5-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5-8b-instruct"}, "results": {"thaiexam_qa": {"accuracy": 0.4123893805309734}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.4483394833948339}}}
MC/llama-3-typhoon-v1.5x-70b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-70b-instruct"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.599169741697417}, "thaiexam_qa": {"accuracy": 0.5876106194690266}}}
MC/llama-3-typhoon-v1.5x-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-8b-instruct"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.4381918819188192}, "thaiexam_qa": {"accuracy": 0.4194690265486726}}}
MC/llama3-8b-cpt-sea-lionv2-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "aisingapore/llama3-8b-cpt-sea-lionv2-instruct"}, "results": {"thaiexam_qa": {"accuracy": 0.3752212389380531}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.4261992619926199}}}
MC/openthaigpt-1.0.0-70b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-70b-chat"}, "results": {"thaiexam_qa": {"accuracy": 0.5008849557522124}, "m3exam_tha_seacrowd_qa": {"accuracy": 0.5170664206642066}}}
MC/openthaigpt-1.0.0-7b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-7b-chat"}, "results": {"m3exam_tha_seacrowd_qa": {"accuracy": 0.2822878228782288}, "thaiexam_qa": {"accuracy": 0.2530973451327433}}}
NLG/LLaMa3-8b-WangchanX-sft-Full/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "airesearch/LLaMa3-8b-WangchanX-sft-Full"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 37.63697889890035, "ROUGE2": 16.413944846030954, "ROUGEL": 28.198600038522176}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 22.860828678699743, "SacreBLEU": 34.37529930982085, "chrF++": 50.24723442092947}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 48.49722160016288, "SacreBLEU": 31.91991593962373, "chrF++": 55.34127166301678}, "iapp_squad_seacrowd_qa": {"ROUGE1": 80.60864178690281, "ROUGE2": 66.87006649354268, "ROUGEL": 80.37538395013598}}}
NLG/Meta-Llama-3.1-70B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 34.15279851230984, "ROUGE2": 13.527627142475689, "ROUGEL": 24.90354379864548}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 28.0767291735307, "SacreBLEU": 38.57267393751737, "chrF++": 53.957636026143504}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 52.75756495966303, "SacreBLEU": 36.91508162495054, "chrF++": 59.46016523399153}, "iapp_squad_seacrowd_qa": {"ROUGE1": 65.23784338666768, "ROUGE2": 52.796325373393, "ROUGEL": 64.62767407946384}}}
NLG/Meta-Llama-3.1-8B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 31.280150935761508, "ROUGE2": 11.529087060870982, "ROUGEL": 21.6096544052766}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 21.53499185569734, "SacreBLEU": 30.453791413365572, "chrF++": 46.904976131032285}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 49.534981549467645, "SacreBLEU": 32.279638989338075, "chrF++": 56.42469467923213}, "iapp_squad_seacrowd_qa": {"ROUGE1": 68.99913671626041, "ROUGE2": 55.69258894655722, "ROUGEL": 68.56681583690373}}}
NLG/Qwen2-72B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-72B-Instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 25.69630294614229, "ROUGE2": 8.260340869439057, "ROUGEL": 17.352486052399065}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 21.474236531426097, "SacreBLEU": 33.229982095060386, "chrF++": 51.774302337694}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 44.83361969366853, "SacreBLEU": 29.852007065399075, "chrF++": 58.962206355088185}, "iapp_squad_seacrowd_qa": {"ROUGE1": 35.71235816164353, "ROUGE2": 27.15104533416106, "ROUGEL": 34.979137037450954}}}
NLG/Qwen2-7B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "Qwen/Qwen2-7B-Instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 25.019716118676005, "ROUGE2": 8.819938476123221, "ROUGEL": 16.893508823907176}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 18.429465021232573, "SacreBLEU": 26.773578270493367, "chrF++": 44.35810167800766}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 39.49508634200125, "SacreBLEU": 23.797473235026303, "chrF++": 54.655790877849306}, "iapp_squad_seacrowd_qa": {"ROUGE1": 38.01319643414022, "ROUGE2": 29.173725113229874, "ROUGEL": 37.24609957955968}}}
NLG/Sailor-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "sail/Sailor-7B-Chat"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 8.082348750838788, "ROUGE2": 1.4226766606322387, "ROUGEL": 6.896338997417523}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 20.09681875599262, "SacreBLEU": 32.529923734701214, "chrF++": 48.34700186743904}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 45.867939537604286, "SacreBLEU": 29.86105422296352, "chrF++": 57.37614705818073}, "iapp_squad_seacrowd_qa": {"ROUGE1": 32.08290873923779, "ROUGE2": 24.236358989805883, "ROUGEL": 31.570119184710787}}}
NLG/SeaLLMs-v3-7B-Chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "SeaLLMs/SeaLLMs-v3-7B-Chat"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 29.968426413809173, "ROUGE2": 10.227472359285144, "ROUGEL": 20.27396351668969}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 23.369448304817407, "SacreBLEU": 32.90676323492414, "chrF++": 49.70573051967939}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 44.66418625932317, "SacreBLEU": 28.795665596226115, "chrF++": 56.82789195857798}, "iapp_squad_seacrowd_qa": {"ROUGE1": 48.51542593783137, "ROUGE2": 39.36421866950317, "ROUGEL": 48.123415223878894}}}
NLG/gpt-4o-2024-05-13/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-2024-05-13"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 26.07540519508083, "ROUGE2": 9.493810579867878, "ROUGEL": 17.461039314397603}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 34.184695263232925, "SacreBLEU": 46.20707168241045, "chrF++": 59.87032517881556}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 52.95793003127012, "SacreBLEU": 37.57233656165935, "chrF++": 62.5830458169773}, "iapp_squad_seacrowd_qa": {"ROUGE1": 37.22409157547258, "ROUGE2": 28.84385857992345, "ROUGEL": 36.59907613378963}}}
NLG/gpt-4o-mini-2024-07-18/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "api/gpt-4o-mini-2024-07-18"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 27.390711967187208, "ROUGE2": 9.445645106910009, "ROUGEL": 18.09278006481461}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 30.44345773531864, "SacreBLEU": 42.02386185746654, "chrF++": 56.91612295275716}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 50.45456750112044, "SacreBLEU": 35.04895415588623, "chrF++": 61.04080233081527}, "iapp_squad_seacrowd_qa": {"ROUGE1": 38.86119199633104, "ROUGE2": 30.634787850509333, "ROUGEL": 38.16643099311495}}}
NLG/llama-3-typhoon-v1.5-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5-8b-instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 29.70552893308527, "ROUGE2": 10.40130095291124, "ROUGEL": 19.73099418924683}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 20.42301736941763, "SacreBLEU": 30.308624386945784, "chrF++": 47.406681011509086}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 44.32516035484208, "SacreBLEU": 28.405122612243485, "chrF++": 56.698085101310255}, "iapp_squad_seacrowd_qa": {"ROUGE1": 56.6595624140093, "ROUGE2": 44.21773783337853, "ROUGEL": 55.9874416305376}}}
NLG/llama-3-typhoon-v1.5x-70b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-70b-instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 27.94828025074611, "ROUGE2": 10.495022935704329, "ROUGEL": 18.927149051397745}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 24.56695282857076, "SacreBLEU": 35.87021928950534, "chrF++": 52.07290778141319}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 47.93006808151984, "SacreBLEU": 31.98122508709968, "chrF++": 58.99769162518746}, "iapp_squad_seacrowd_qa": {"ROUGE1": 55.09413362900708, "ROUGE2": 43.48289395850847, "ROUGEL": 54.53605035767775}}}
NLG/llama-3-typhoon-v1.5x-8b-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "scb10x/llama-3-typhoon-v1.5x-8b-instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 24.647443491703218, "ROUGE2": 9.292572008638803, "ROUGEL": 16.65766474818548}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 21.75114260265249, "SacreBLEU": 31.94141229827124, "chrF++": 48.65763816553104}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 44.6756443533957, "SacreBLEU": 28.860428237470472, "chrF++": 57.08942685353786}, "iapp_squad_seacrowd_qa": {"ROUGE1": 55.17747628025658, "ROUGE2": 43.73106636235266, "ROUGEL": 54.77648311072716}}}
NLG/llama3-8b-cpt-sea-lionv2-instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "aisingapore/llama3-8b-cpt-sea-lionv2-instruct"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 21.57896237634282, "ROUGE2": 7.9567050604143, "ROUGEL": 14.66884880145703}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 27.246242185807706, "SacreBLEU": 37.798960163282814, "chrF++": 53.07546907542352}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 50.77091046956112, "SacreBLEU": 34.139957925808964, "chrF++": 57.95101492092646}, "iapp_squad_seacrowd_qa": {"ROUGE1": 54.04461363174925, "ROUGE2": 42.74444663261881, "ROUGEL": 53.61474107049092}}}
NLG/openthaigpt-1.0.0-70b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-70b-chat"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 29.1684873017262, "ROUGE2": 9.692711025177072, "ROUGEL": 21.17699118102632}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 27.71880173693793, "SacreBLEU": 38.90498692430866, "chrF++": 53.54389545440027}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 7.2820993327079, "SacreBLEU": 3.58778640323282, "chrF++": 8.297499265948852}, "iapp_squad_seacrowd_qa": {"ROUGE1": 62.16963279925736, "ROUGE2": 46.67873410863833, "ROUGEL": 61.676715591891366}}}
NLG/openthaigpt-1.0.0-7b-chat/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "openthaigpt/openthaigpt-1.0.0-7b-chat"}, "results": {"xl_sum_tha_seacrowd_t2t": {"ROUGE1": 24.57797140756105, "ROUGE2": 6.576683473636153, "ROUGEL": 16.68568250631098}, "flores200_eng_Latn_tha_Thai_seacrowd_t2t": {"BLEU": 18.43147114138253, "SacreBLEU": 29.731592373901456, "chrF++": 44.90234410450009}, "flores200_tha_Thai_eng_Latn_seacrowd_t2t": {"BLEU": 15.878363204971327, "SacreBLEU": 6.171787547718115, "chrF++": 24.25108108180568}, "iapp_squad_seacrowd_qa": {"ROUGE1": 50.55068681529102, "ROUGE2": 36.76246561062573, "ROUGEL": 50.01850761048598}}}
NLU/LLaMa3-8b-WangchanX-sft-Full/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "airesearch/LLaMa3-8b-WangchanX-sft-Full"}, "results": {"belebele_tha_thai_seacrowd_qa": {"accuracy": 0.6211111111111111}, "wisesight_thai_sentiment_seacrowd_text": {"accuracy": 0.507675028079371}, "xnli.tha_seacrowd_pairs": {"accuracy": 0.3083832335329341}, "xcopa_tha_seacrowd_qa": {"accuracy": 0.73}}}
NLU/Meta-Llama-3.1-70B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-70B-Instruct"}, "results": {"xcopa_tha_seacrowd_qa": {"accuracy": 0.926}, "belebele_tha_thai_seacrowd_qa": {"accuracy": 0.8755555555555555}, "wisesight_thai_sentiment_seacrowd_text": {"accuracy": 0.4953201048296518}, "xnli.tha_seacrowd_pairs": {"accuracy": 0.3347305389221557}}}
NLU/Meta-Llama-3.1-8B-Instruct/results.json
ADDED
@@ -0,0 +1 @@
{"config": {"model_name": "meta-llama/Meta-Llama-3.1-8B-Instruct"}, "results": {"belebele_tha_thai_seacrowd_qa": {"accuracy": 0.7488888888888889}, "wisesight_thai_sentiment_seacrowd_text": {"accuracy": 0.4545114189442156}, "xcopa_tha_seacrowd_qa": {"accuracy": 0.734}, "xnli.tha_seacrowd_pairs": {"accuracy": 0.3349301397205589}}}