delete home
home/orbina/results_v0.2/asafaya/kanarya-2b/results_2024-07-14T09-37-22.461431.json
DELETED
@@ -1,26 +0,0 @@
-{
-    "config": {
-        "model_dtype": "torch.float16",
-        "model_name": "asafaya/kanarya-2b"
-    },
-    "results": {
-        "MMLU": {
-            "metric_name": 0.24136656067440657
-        },
-        "Truthful_qa": {
-            "metric_name": 0.43056237173575296
-        },
-        "ARC": {
-            "metric_name": 0.2935153583617747
-        },
-        "HellaSwag": {
-            "metric_name": 0.42610364683301344
-        },
-        "GSM8K": {
-            "metric_name": 0.015945330296127564
-        },
-        "Winogrande": {
-            "metric_name": 0.5
-        }
-    }
-}
home/orbina/results_v0.2/asafaya/kanarya-2b/results_2024-07-14T10-13-27.720845.json
DELETED
@@ -1,26 +0,0 @@
-{
-    "config": {
-        "model_dtype": "torch.float16",
-        "model_name": "asafaya/kanarya-2b"
-    },
-    "results": {
-        "MMLU": {
-            "metric_name": 0.24136656067440657
-        },
-        "Truthful_qa": {
-            "metric_name": 0.43056237173575296
-        },
-        "ARC": {
-            "metric_name": 0.2935153583617747
-        },
-        "HellaSwag": {
-            "metric_name": 0.42610364683301344
-        },
-        "GSM8K": {
-            "metric_name": 0.015945330296127564
-        },
-        "Winogrande": {
-            "metric_name": 0.5
-        }
-    }
-}
home/orbina/results_v0.2/nvidia/Llama3-ChatQA-1.5-8B/results_2024-07-13T15-27-57.339619.json
DELETED
@@ -1,26 +0,0 @@
-{
-    "config": {
-        "model_dtype": "torch.float16",
-        "model_name": "nvidia/Llama3-ChatQA-1.5-8B"
-    },
-    "results": {
-        "MMLU": {
-            "metric_name": 0.4737854026473416
-        },
-        "Truthful_qa": {
-            "metric_name": 0.5004162765382687
-        },
-        "ARC": {
-            "metric_name": 0.42406143344709896
-        },
-        "HellaSwag": {
-            "metric_name": 0.48266907530766623
-        },
-        "GSM8K": {
-            "metric_name": 0.02885345482156416
-        },
-        "Winogrande": {
-            "metric_name": 0.5434439178515008
-        }
-    }
-}