StarscreamDeceptions committed
Commit 88763f7 · verified · 1 Parent(s): d42749d

Upload 4 files

Llama-3.1-70B-Instruct/Llama-3.1-70B-Instruct/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+   "results": {
+     "mmlu": {
+       "acc": 0.854321
+     },
+     "mmmlu": {
+       "acc": 0.7166663275479683
+     },
+     "cmmlu": {
+       "acc": 0.91234
+     },
+     "mmmlu_ar": {
+       "acc": 0.7107961828799316
+     },
+     "mmmlu_bn": {
+       "acc": 0.6651474148981626
+     },
+     "mmmlu_de": {
+       "acc": 0.7700470018515881
+     },
+     "mmmlu_es": {
+       "acc": 0.792693348525851
+     },
+     "mmmlu_fr": {
+       "acc": 0.7791625124626121
+     },
+     "mmmlu_hi": {
+       "acc": 0.7266771115225751
+     },
+     "mmmlu_id": {
+       "acc": 0.756943455348241
+     },
+     "mmmlu_it": {
+       "acc": 0.7783079333428287
+     },
+     "mmmlu_ja": {
+       "acc": 0.7378578550064093
+     },
+     "mmmlu_ko": {
+       "acc": 0.7273892607890614
+     },
+     "mmmlu_pt": {
+       "acc": 0.7889189574134738
+     },
+     "mmmlu_sw": {
+       "acc": 0.6399373308645492
+     },
+     "mmmlu_yo": {
+       "acc": 0.4115510611024071
+     },
+     "mmmlu_zh": {
+       "acc": 0.7478991596638656
+     }
+   },
+   "versions": {
+     "mmlu": 0,
+     "mmmlu": 0,
+     "cmmlu": 0,
+     "mmmlu_ar": 0,
+     "mmmlu_bn": 0,
+     "mmmlu_de": 0,
+     "mmmlu_es": 0,
+     "mmmlu_fr": 0,
+     "mmmlu_hi": 0,
+     "mmmlu_id": 0,
+     "mmmlu_it": 0,
+     "mmmlu_ja": 0,
+     "mmmlu_ko": 0,
+     "mmmlu_pt": 0,
+     "mmmlu_sw": 0,
+     "mmmlu_yo": 0,
+     "mmmlu_zh": 0
+   },
+   "config": {
+     "model": "hf-causal-experimental",
+     "model_args": "instruction-tuned=meta-llama/Llama-3.1-70B-Instruct,revision=main,dtype=bfloat16",
+     "num_fewshot": 5,
+     "batch_size": 1,
+     "batch_sizes": [],
+     "device": "cpu",
+     "no_cache": true,
+     "limit": 20,
+     "bootstrap_iters": 100000,
+     "description_dict": null,
+     "model_dtype": "bfloat16",
+     "model_name": "meta-llama/Llama-3.1-70B-Instruct",
+     "model_sha": "main"
+   }
+ }
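
Note on the schema: in these files the aggregate "mmmlu" entry appears to be the unweighted mean of the 14 per-language "mmmlu_*" accuracies (0.716666… above matches the mean of mmmlu_ar through mmmlu_zh, and the same holds for the files below). A minimal Python sketch to check that against any one of the uploaded files; the local path is an assumption and should point at wherever the JSON has been downloaded:

```python
import json
from statistics import mean

# Assumed local path mirroring this commit's layout; adjust as needed.
path = "Llama-3.1-70B-Instruct/Llama-3.1-70B-Instruct/results_2024-11-11 15_46_20.425378.json"

with open(path) as f:
    results = json.load(f)["results"]

# Per-language MMMLU accuracies live under keys like "mmmlu_ar", "mmmlu_fr", ...
per_lang = {k: v["acc"] for k, v in results.items() if k.startswith("mmmlu_")}

print(f"aggregate mmmlu acc:      {results['mmlu' + 'mlu'[2:]]['acc']:.6f}" if False else
      f"aggregate mmmlu acc:      {results['mmmlu']['acc']:.6f}")
print(f"mean of {len(per_lang)} language scores: {mean(per_lang.values()):.6f}")
```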
Llama-3.1-8B-Instruct/Llama-3.1-8B-Instruct/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+   "results": {
+     "mmlu": {
+       "acc": 0.854321
+     },
+     "mmmlu": {
+       "acc": 0.5000864752680734
+     },
+     "cmmlu": {
+       "acc": 0.91234
+     },
+     "mmmlu_ar": {
+       "acc": 0.4219484403931064
+     },
+     "mmmlu_bn": {
+       "acc": 0.387978920381712
+     },
+     "mmmlu_de": {
+       "acc": 0.5563310069790628
+     },
+     "mmmlu_es": {
+       "acc": 0.5913687508901866
+     },
+     "mmmlu_fr": {
+       "acc": 0.5894459478706737
+     },
+     "mmmlu_hi": {
+       "acc": 0.4584104828372027
+     },
+     "mmmlu_id": {
+       "acc": 0.5427289559891754
+     },
+     "mmmlu_it": {
+       "acc": 0.5626691354507904
+     },
+     "mmmlu_ja": {
+       "acc": 0.52072354365475
+     },
+     "mmmlu_ko": {
+       "acc": 0.5078336419313488
+     },
+     "mmmlu_pt": {
+       "acc": 0.5901580971371599
+     },
+     "mmmlu_sw": {
+       "acc": 0.40279162512462613
+     },
+     "mmmlu_yo": {
+       "acc": 0.31363053696054694
+     },
+     "mmmlu_zh": {
+       "acc": 0.5551915681526848
+     }
+   },
+   "versions": {
+     "mmlu": 0,
+     "mmmlu": 0,
+     "cmmlu": 0,
+     "mmmlu_ar": 0,
+     "mmmlu_bn": 0,
+     "mmmlu_de": 0,
+     "mmmlu_es": 0,
+     "mmmlu_fr": 0,
+     "mmmlu_hi": 0,
+     "mmmlu_id": 0,
+     "mmmlu_it": 0,
+     "mmmlu_ja": 0,
+     "mmmlu_ko": 0,
+     "mmmlu_pt": 0,
+     "mmmlu_sw": 0,
+     "mmmlu_yo": 0,
+     "mmmlu_zh": 0
+   },
+   "config": {
+     "model": "meta-llama/Llama-3.1-8B-Instruct",
+     "model_args": "instruction-tuned=meta-llama/Llama-3.1-8B-Instruct,revision=main,dtype=bfloat16",
+     "num_fewshot": 5,
+     "batch_size": 1,
+     "batch_sizes": [],
+     "device": "cpu",
+     "no_cache": true,
+     "limit": 20,
+     "bootstrap_iters": 100000,
+     "description_dict": null,
+     "model_dtype": "bfloat16",
+     "model_name": "meta-llama/Llama-3.1-8B-Instruct",
+     "model_sha": "main"
+   }
+ }
Meta-Llama-3-70B-Instruct/Meta-Llama-3-70B-Instruct/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+   "results": {
+     "mmlu": {
+       "acc": 0.854321
+     },
+     "mmmlu": {
+       "acc": 0.6430148330518648
+     },
+     "cmmlu": {
+       "acc": 0.91234
+     },
+     "mmmlu_ar": {
+       "acc": 0.6063238854863979
+     },
+     "mmmlu_bn": {
+       "acc": 0.5377439111237715
+     },
+     "mmmlu_de": {
+       "acc": 0.7142144993590657
+     },
+     "mmmlu_es": {
+       "acc": 0.7429853297251103
+     },
+     "mmmlu_fr": {
+       "acc": 0.7317333713146276
+     },
+     "mmmlu_hi": {
+       "acc": 0.6501922803019513
+     },
+     "mmmlu_id": {
+       "acc": 0.7058823529411765
+     },
+     "mmmlu_it": {
+       "acc": 0.7333000997008973
+     },
+     "mmmlu_ja": {
+       "acc": 0.6554621848739496
+     },
+     "mmmlu_ko": {
+       "acc": 0.6451360205098988
+     },
+     "mmmlu_pt": {
+       "acc": 0.7374305654465176
+     },
+     "mmmlu_sw": {
+       "acc": 0.5106110240706452
+     },
+     "mmmlu_yo": {
+       "acc": 0.3362056687081612
+     },
+     "mmmlu_zh": {
+       "acc": 0.6949864691639368
+     }
+   },
+   "versions": {
+     "mmlu": 0,
+     "mmmlu": 0,
+     "cmmlu": 0,
+     "mmmlu_ar": 0,
+     "mmmlu_bn": 0,
+     "mmmlu_de": 0,
+     "mmmlu_es": 0,
+     "mmmlu_fr": 0,
+     "mmmlu_hi": 0,
+     "mmmlu_id": 0,
+     "mmmlu_it": 0,
+     "mmmlu_ja": 0,
+     "mmmlu_ko": 0,
+     "mmmlu_pt": 0,
+     "mmmlu_sw": 0,
+     "mmmlu_yo": 0,
+     "mmmlu_zh": 0
+   },
+   "config": {
+     "model": "hf-causal-experimental",
+     "model_args": "instruction-tuned=meta-llama/Meta-Llama-3-70B-Instruct,revision=main,dtype=bfloat16",
+     "num_fewshot": 5,
+     "batch_size": 1,
+     "batch_sizes": [],
+     "device": "cpu",
+     "no_cache": true,
+     "limit": 20,
+     "bootstrap_iters": 100000,
+     "description_dict": null,
+     "model_dtype": "bfloat16",
+     "model_name": "meta-llama/Meta-Llama-3-70B-Instruct",
+     "model_sha": "main"
+   }
+ }
Meta-Llama-3-8B-Instruct/Meta-Llama-3-8B-Instruct/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+   "results": {
+     "mmlu": {
+       "acc": 0.854321
+     },
+     "mmmlu": {
+       "acc": 0.4657456202820111
+     },
+     "cmmlu": {
+       "acc": 0.91234
+     },
+     "mmmlu_ar": {
+       "acc": 0.40542657741062527
+     },
+     "mmmlu_bn": {
+       "acc": 0.36433556473436834
+     },
+     "mmmlu_de": {
+       "acc": 0.5351801737644211
+     },
+     "mmmlu_es": {
+       "acc": 0.5580401652186299
+     },
+     "mmmlu_fr": {
+       "acc": 0.5578977353653326
+     },
+     "mmmlu_hi": {
+       "acc": 0.4143284432417035
+     },
+     "mmmlu_id": {
+       "acc": 0.5100413046574562
+     },
+     "mmmlu_it": {
+       "acc": 0.5332573707449081
+     },
+     "mmmlu_ja": {
+       "acc": 0.42308787921948443
+     },
+     "mmmlu_ko": {
+       "acc": 0.465389545648768
+     },
+     "mmmlu_pt": {
+       "acc": 0.5546218487394958
+     },
+     "mmmlu_sw": {
+       "acc": 0.37501780373166216
+     },
+     "mmmlu_yo": {
+       "acc": 0.3096425010682239
+     },
+     "mmmlu_zh": {
+       "acc": 0.5141717704030765
+     }
+   },
+   "versions": {
+     "mmlu": 0,
+     "mmmlu": 0,
+     "cmmlu": 0,
+     "mmmlu_ar": 0,
+     "mmmlu_bn": 0,
+     "mmmlu_de": 0,
+     "mmmlu_es": 0,
+     "mmmlu_fr": 0,
+     "mmmlu_hi": 0,
+     "mmmlu_id": 0,
+     "mmmlu_it": 0,
+     "mmmlu_ja": 0,
+     "mmmlu_ko": 0,
+     "mmmlu_pt": 0,
+     "mmmlu_sw": 0,
+     "mmmlu_yo": 0,
+     "mmmlu_zh": 0
+   },
+   "config": {
+     "model": "hf-causal-experimental",
+     "model_args": "instruction-tuned=meta-llama/Meta-Llama-3-8B-Instruct,revision=main,dtype=bfloat16",
+     "num_fewshot": 5,
+     "batch_size": 1,
+     "batch_sizes": [],
+     "device": "cpu",
+     "no_cache": true,
+     "limit": 20,
+     "bootstrap_iters": 100000,
+     "description_dict": null,
+     "model_dtype": "bfloat16",
+     "model_name": "meta-llama/Meta-Llama-3-8B-Instruct",
+     "model_sha": "main"
+   }
+ }
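
All four uploaded files share the same schema ("results" keyed by task, each with an "acc" field, plus "versions" and "config"), so they can be lined up side by side after downloading. A minimal sketch, assuming the files are read back from the same directory layout as in this commit; the glob pattern and column widths are illustrative, not part of the upload:

```python
import glob
import json

# Collect every results file under the <model>/<model>/results_*.json layout used here.
rows = {}
for path in sorted(glob.glob("*/*/results_*.json")):
    with open(path) as f:
        data = json.load(f)
    model = data["config"]["model_name"]
    rows[model] = {task: scores["acc"] for task, scores in data["results"].items()}

# Print one column per model and one line per task.
tasks = sorted(next(iter(rows.values())))
print("task".ljust(12) + "".join(m.split("/")[-1][:24].rjust(26) for m in rows))
for task in tasks:
    print(task.ljust(12) + "".join(f"{rows[m][task]:26.4f}" for m in rows))
```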