xuanricheng committed on
Commit 6b7a83b · verified · 1 Parent(s): bd326e2

Add results for TIGER-Lab/MAmmoTH2-8x7B-Plus

TIGER-Lab/MAmmoTH2-8x7B-Plus/results_2024-05-22T13-09-05.114856.json ADDED
@@ -0,0 +1,91 @@
+ {
+     "config_general": {
+         "model_name": "TIGER-Lab/MAmmoTH2-8x7B-Plus",
+         "model_dtype": "float16",
+         "model_size": 0
+     },
+     "results": {
+         "harness-c_arc_challenge": {
+             "acc_norm": 57.42,
+             "acc_stderr": 0,
+             "c_arc_challenge_25shot_acc": 51.71,
+             "c_arc_challenge_25shot_acc_norm": 57.42
+         },
+         "harness-c_gsm8k": {
+             "acc": 56.56,
+             "acc_stderr": 0,
+             "c_gsm8k_5shot_acc": 56.56
+         },
+         "harness-c_hellaswag": {
+             "acc_norm": 61.38,
+             "acc_stderr": 0,
+             "c_hellaswag_10shot_acc": 45.77,
+             "c_hellaswag_10shot_acc_norm": 61.38
+         },
+         "harness-c-sem-v2": {
+             "acc": 77.93,
+             "acc_stderr": 0,
+             "c_sem_v2-LLSRC_5shot_acc": 80.58,
+             "c_sem_v2-SLPWC_5shot_acc": 71.86,
+             "c_sem_v2-SLRFC_5shot_acc": 78.42,
+             "c_sem_v2-SLSRC_5shot_acc": 80.86,
+             "c_sem_v2-LLSRC_5shot_acc_norm": 80.58,
+             "c_sem_v2-SLPWC_5shot_acc_norm": 71.86,
+             "c_sem_v2-SLRFC_5shot_acc_norm": 78.42,
+             "c_sem_v2-SLSRC_5shot_acc_norm": 80.86
+         },
+         "harness-c_truthfulqa_mc": {
+             "mc2": 58.49,
+             "acc_stderr": 0,
+             "c_truthfulqa_mc_0shot_mc1": 36.35,
+             "c_truthfulqa_mc_0shot_mc2": 58.49
+         },
+         "harness-c_winogrande": {
+             "acc": 62.19,
+             "acc_stderr": 0,
+             "c_winogrande_0shot_acc": 62.19
+         },
+         "harness-cmmlu": {
+             "acc_norm": 56.53,
+             "acc_stderr": 0,
+             "cmmlu_fullavg_5shot_acc": 56.53,
+             "cmmlu-virology_5shot_acc": 42.77,
+             "cmmlu-nutrition_5shot_acc": 63.4,
+             "cmmlu-sociology_5shot_acc": 75.12,
+             "cmmlu-philosophy_5shot_acc": 59.49,
+             "cmmlu-prehistory_5shot_acc": 55.86,
+             "cmmlu-miscellaneous_5shot_acc": 62.45,
+             "cmmlu-moral_disputes_5shot_acc": 59.54,
+             "cmmlu-moral_scenarios_5shot_acc": 31.62,
+             "cmmlu-world_religions_5shot_acc": 63.16,
+             "cmmlu-professional_law_5shot_acc": 42.31,
+             "cmmlu-public_relations_5shot_acc": 63.64,
+             "cmmlu-security_studies_5shot_acc": 67.76,
+             "cmmlu-us_foreign_policy_5shot_acc": 71.0,
+             "cmmlu-professional_medicine_5shot_acc": 47.79,
+             "cmmlu-professional_accounting_5shot_acc": 43.26,
+             "cmmlu-professional_psychology_5shot_acc": 55.39,
+             "cmmlu_fullavg_5shot_acc_norm": 56.53,
+             "cmmlu-virology_5shot_acc_norm": 42.77,
+             "cmmlu-nutrition_5shot_acc_norm": 63.4,
+             "cmmlu-sociology_5shot_acc_norm": 75.12,
+             "cmmlu-philosophy_5shot_acc_norm": 59.49,
+             "cmmlu-prehistory_5shot_acc_norm": 55.86,
+             "cmmlu-miscellaneous_5shot_acc_norm": 62.45,
+             "cmmlu-moral_disputes_5shot_acc_norm": 59.54,
+             "cmmlu-moral_scenarios_5shot_acc_norm": 31.62,
+             "cmmlu-world_religions_5shot_acc_norm": 63.16,
+             "cmmlu-professional_law_5shot_acc_norm": 42.31,
+             "cmmlu-public_relations_5shot_acc_norm": 63.64,
+             "cmmlu-security_studies_5shot_acc_norm": 67.76,
+             "cmmlu-us_foreign_policy_5shot_acc_norm": 71.0,
+             "cmmlu-professional_medicine_5shot_acc_norm": 47.79,
+             "cmmlu-professional_accounting_5shot_acc_norm": 43.26,
+             "cmmlu-professional_psychology_5shot_acc_norm": 55.39
+         }
+     },
+     "versions": {},
+     "config_tasks": {},
+     "summary_tasks": {},
+     "summary_general": {}
+ }
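
A minimal sketch of how a results file with this layout could be read, assuming the JSON has been downloaded locally; the file path and the choice of "headline" metric per harness (preferring acc_norm, then mc2, then acc) are illustrative assumptions, not part of this commit.

import json

# Illustrative local path; adjust to wherever the results file is stored.
path = "results_2024-05-22T13-09-05.114856.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])

# Each harness block in "results" carries several metrics; pick one headline
# value per block (assumed preference order: acc_norm, then mc2, then acc).
for task, metrics in data["results"].items():
    headline = next(
        (metrics[k] for k in ("acc_norm", "mc2", "acc") if k in metrics),
        None,
    )
    print(f"{task}: {headline}")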