Schema of `allenai/href_results`. For `string` columns, min and max are string lengths; for `float64` and `int64` columns, they are value ranges.

| column | dtype | min | max |
|---|---|---|---|
| path | string | 19 | 43 |
| brainstorm | float64 | 0 | 0.55 |
| open_qa | float64 | 0.03 | 0.95 |
| closed_qa | float64 | 0 | 0.55 |
| extract | float64 | 0 | 0.45 |
| generation | float64 | 0.01 | 0.52 |
| rewrite | float64 | 0 | 0.51 |
| summarize | float64 | 0 | 0.45 |
| classify | float64 | 0.01 | 0.55 |
| reasoning_over_numerical_data | float64 | 0.01 | 0.5 |
| multi-document_synthesis | float64 | 0 | 0.59 |
| fact_checking_or_attributed_qa | float64 | 0.02 | 0.6 |
| average | float64 | 0.01 | 0.5 |
| brainstorm_rank | int64 | 1 | 36 |
| open_qa_rank | int64 | 1 | 37 |
| closed_qa_rank | int64 | 1 | 34 |
| extract_rank | int64 | 1 | 36 |
| generation_rank | int64 | 1 | 36 |
| rewrite_rank | int64 | 1 | 35 |
| summarize_rank | int64 | 1 | 34 |
| classify_rank | int64 | 1 | 34 |
| reasoning_over_numerical_data_rank | int64 | 1 | 36 |
| multi-document_synthesis_rank | int64 | 1 | 37 |
| fact_checking_or_attributed_qa_rank | int64 | 1 | 36 |
| average_rank | int64 | 1 | 37 |
| brainstorm_confi | string | 11 | 11 |
| open_qa_confi | string | 11 | 11 |
| closed_qa_confi | string | 11 | 11 |
| extract_confi | string | 11 | 11 |
| generation_confi | string | 11 | 11 |
| rewrite_confi | string | 11 | 11 |
| summarize_confi | string | 11 | 11 |
| classify_confi | string | 11 | 11 |
| reasoning_over_numerical_data_confi | string | 11 | 11 |
| multi-document_synthesis_confi | string | 11 | 11 |
| fact_checking_or_attributed_qa_confi | string | 11 | 11 |
| average_confi | string | 13 | 13 |
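The rows can be loaded directly with the `datasets` library. A minimal sketch follows; the available config and split names are not shown in the preview, so the loop simply prints whatever splits come back:

```python
# Minimal sketch: load the results and inspect splits and columns.
# Assumes only that the repo id "allenai/href_results" loads with the
# default configuration; split names are whatever the repo defines.
from datasets import load_dataset

ds = load_dataset("allenai/href_results")
for split_name, split in ds.items():
    print(split_name, len(split), "rows")
    print(split.column_names)
```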
First block of rows in the preview, one per model. Each cell folds the score column together with its `*_rank` and `*_confi` columns as `score (rank, +upper/-lower)`.

| path | brainstorm | open_qa | closed_qa | extract | generation | rewrite | summarize | classify | reasoning_over_numerical_data | multi-document_synthesis | fact_checking_or_attributed_qa | average |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| meta-llama/Llama-2-13b-chat-hf | 0.168 (19, +3.0/-2.8) | 0.525 (21, +9.8/-9.8) | 0.218 (14, +5.2/-5.0) | 0.198 (14, +4.7/-4.5) | 0.159 (19, +2.9/-2.9) | 0.142 (19, +2.7/-2.5) | 0.198 (15, +5.4/-5.2) | 0.274 (20, +5.7/-5.5) | 0.115 (21, +2.6/-2.5) | 0.091 (20, +2.6/-2.6) | 0.42 (8, +4.5/-4.5) | 0.1936 (20, +1.16/-1.16) |
| meta-llama/Llama-2-70b-chat-hf | 0.223 (16, +3.3/-3.2) | 0.613 (21, +9.3/-9.8) | 0.3 (2, +5.2/-5.4) | 0.26 (7, +5.0/-5.0) | 0.192 (16, +3.2/-3.1) | 0.171 (16, +2.9/-2.8) | 0.218 (7, +5.7/-5.4) | 0.343 (9, +5.7/-6.0) | 0.191 (16, +3.3/-3.1) | 0.132 (17, +3.3/-3.1) | 0.485 (8, +4.3/-4.5) | 0.2448 (17, +1.22/-1.24) |
| meta-llama/Llama-2-7b-chat-hf | 0.164 (19, +3.0/-2.8) | 0.417 (35, +9.8/-9.8) | 0.213 (14, +5.2/-4.7) | 0.129 (14, +4.2/-4.0) | 0.133 (19, +2.8/-2.7) | 0.129 (19, +2.5/-2.5) | 0.097 (21, +4.0/-3.7) | 0.192 (20, +5.2/-5.0) | 0.073 (21, +2.1/-2.1) | 0.075 (20, +2.4/-2.4) | 0.335 (18, +4.5/-4.3) | 0.1563 (25, +1.08/-1.06) |
| meta-llama/Llama-3.1-70B-Instruct | 0.486 (1, +4.0/-3.8) | 0.843 (1, +6.4/-6.9) | 0.552 (1, +5.9/-6.2) | 0.45 (1, +5.7/-5.5) | 0.455 (1, +3.9/-3.9) | 0.446 (1, +3.7/-3.7) | 0.433 (1, +6.4/-6.4) | 0.545 (1, +6.0/-6.0) | 0.503 (1, +3.7/-3.8) | 0.498 (7, +4.5/-4.6) | 0.571 (1, +4.3/-4.8) | 0.498 (1, +1.47/-1.42) |
| meta-llama/Llama-3.1-8B-Instruct | 0.467 (1, +3.9/-3.8) | 0.794 (5, +7.4/-7.8) | 0.406 (2, +6.4/-5.9) | 0.324 (7, +5.4/-5.4) | 0.363 (7, +3.8/-3.8) | 0.367 (7, +3.5/-3.4) | 0.329 (1, +5.9/-5.9) | 0.43 (1, +6.2/-6.2) | 0.299 (11, +3.6/-3.5) | 0.358 (12, +4.4/-4.4) | 0.393 (18, +4.4/-4.4) | 0.385 (8, +1.44/-1.43) |
| allenai/Llama-3.1-Tulu-3-70B-DPO | 0.513 (1, +3.9/-3.9) | 0.569 (21, +9.8/-9.8) | 0.359 (2, +6.2/-5.9) | 0.391 (1, +5.9/-5.9) | 0.461 (1, +4.0/-4.0) | 0.437 (1, +3.7/-3.7) | 0.277 (7, +5.9/-5.9) | 0.415 (9, +6.2/-6.2) | 0.432 (1, +3.8/-3.9) | 0.515 (1, +4.7/-4.6) | 0.254 (25, +3.8/-3.9) | 0.4277 (3, +1.49/-1.44) |
| allenai/Llama-3.1-Tulu-3-70B-SFT | 0.028 (25, +1.3/-1.3) | 0.951 (1, +3.4/-4.4) | 0.376 (2, +6.2/-6.2) | 0.282 (7, +5.4/-5.2) | 0.13 (19, +2.7/-2.7) | 0.117 (19, +2.4/-2.4) | 0.04 (21, +2.7/-2.2) | 0.366 (9, +6.7/-6.2) | 0.353 (5, +3.8/-3.9) | 0.105 (20, +3.0/-2.8) | 0.604 (1, +4.3/-4.5) | 0.2358 (17, +1.28/-1.21) |
| allenai/Llama-3.1-Tulu-3-70B | 0.506 (1, +3.9/-4.0) | 0.583 (21, +9.3/-9.3) | 0.351 (2, +6.2/-5.9) | 0.381 (1, +5.7/-5.7) | 0.444 (1, +4.0/-4.1) | 0.441 (1, +3.6/-3.6) | 0.287 (7, +6.2/-5.9) | 0.428 (1, +6.5/-6.2) | 0.421 (5, +3.8/-3.8) | 0.502 (1, +4.6/-4.5) | 0.26 (25, +4.1/-3.9) | 0.4233 (3, +1.44/-1.42) |
| allenai/Llama-3.1-Tulu-3-8B-DPO | 0.409 (10, +4.1/-4.0) | 0.637 (21, +8.8/-9.8) | 0.26 (14, +5.4/-5.4) | 0.196 (14, +5.2/-5.0) | 0.366 (7, +3.8/-3.9) | 0.337 (7, +3.6/-3.5) | 0.228 (7, +5.9/-5.4) | 0.331 (9, +6.5/-6.2) | 0.279 (11, +3.6/-3.5) | 0.45 (7, +4.6/-4.4) | 0.199 (36, +3.7/-3.7) | 0.3323 (9, +1.37/-1.39) |
| allenai/Llama-3.1-Tulu-3-8B-SFT | 0.015 (29, +1.0/-0.8) | 0.877 (1, +6.4/-6.9) | 0.319 (2, +6.2/-5.7) | 0.176 (14, +4.7/-4.5) | 0.068 (25, +2.2/-2.0) | 0.074 (26, +2.0/-1.9) | 0.007 (28, +1.2/-0.7) | 0.313 (9, +6.2/-6.5) | 0.187 (16, +3.2/-3.1) | 0.04 (26, +1.8/-1.8) | 0.539 (1, +4.5/-4.5) | 0.1697 (25, +1.10/-1.12) |
| allenai/Llama-3.1-Tulu-3-8B | 0.416 (10, +4.0/-3.9) | 0.588 (21, +9.8/-9.8) | 0.235 (14, +5.4/-5.2) | 0.183 (14, +5.0/-4.7) | 0.356 (7, +3.8/-3.7) | 0.34 (7, +3.6/-3.4) | 0.21 (7, +5.7/-5.4) | 0.323 (9, +6.2/-6.0) | 0.28 (11, +3.7/-3.6) | 0.416 (7, +4.4/-4.5) | 0.219 (25, +3.9/-3.9) | 0.3269 (9, +1.42/-1.37) |
| mistralai/Mistral-7B-Instruct-v0.3 | 0.241 (16, +3.4/-3.3) | 0.608 (21, +9.8/-9.8) | 0.213 (14, +5.7/-5.2) | 0.141 (14, +4.5/-4.2) | 0.191 (16, +3.2/-3.2) | 0.185 (16, +2.9/-2.8) | 0.168 (15, +5.2/-5.0) | 0.271 (20, +6.0/-5.7) | 0.173 (16, +3.0/-3.0) | 0.255 (14, +4.0/-4.0) | 0.465 (8, +4.3/-4.3) | 0.2427 (17, +1.27/-1.24) |
| mistralai/Mistral-Large-Instruct-2407 | 0.545 (1, +4.0/-4.1) | 0.588 (21, +8.8/-8.8) | 0.351 (2, +5.7/-5.7) | 0.396 (1, +5.4/-5.7) | 0.514 (1, +3.9/-3.9) | 0.505 (1, +3.6/-3.7) | 0.453 (1, +6.7/-6.7) | 0.445 (1, +6.0/-6.2) | 0.485 (1, +3.7/-3.7) | 0.591 (1, +4.5/-4.5) | 0.26 (25, +4.1/-3.9) | 0.4762 (1, +1.42/-1.41) |
| mistralai/Mistral-Small-Instruct-2409 | 0.47 (1, +3.8/-4.0) | 0.676 (5, +8.8/-8.8) | 0.322 (2, +5.9/-5.7) | 0.364 (1, +5.9/-5.7) | 0.424 (7, +4.0/-3.9) | 0.449 (1, +3.6/-3.7) | 0.401 (1, +6.7/-6.4) | 0.393 (9, +6.0/-5.7) | 0.397 (5, +3.9/-3.7) | 0.547 (1, +4.6/-4.6) | 0.258 (25, +4.1/-3.9) | 0.4221 (3, +1.46/-1.44) |
| allenai/OLMo-2-1124-13B-Instruct | 0.377 (10, +3.9/-3.8) | 0.598 (21, +9.8/-9.8) | 0.309 (2, +5.9/-5.7) | 0.203 (14, +5.0/-4.7) | 0.348 (7, +3.8/-3.8) | 0.36 (7, +3.6/-3.5) | 0.322 (7, +6.2/-6.4) | 0.44 (1, +6.2/-6.2) | 0.259 (11, +3.6/-3.5) | 0.446 (7, +4.9/-4.4) | 0.284 (25, +4.3/-4.1) | 0.3468 (9, +1.39/-1.35) |
| allenai/OLMo-2-1124-7B-Instruct | 0.381 (10, +3.8/-3.8) | 0.475 (21, +9.8/-9.3) | 0.196 (14, +5.2/-4.7) | 0.168 (14, +5.0/-4.7) | 0.323 (13, +3.7/-3.7) | 0.301 (12, +3.4/-3.3) | 0.176 (15, +5.4/-5.0) | 0.269 (20, +5.7/-5.7) | 0.183 (16, +3.3/-3.1) | 0.364 (12, +4.4/-4.4) | 0.158 (36, +3.5/-3.2) | 0.2772 (15, +1.30/-1.32) |
| allenai/OLMo-7B-0724-Instruct-hf | 0.089 (23, +2.3/-2.3) | 0.363 (35, +9.8/-9.8) | 0.057 (34, +3.2/-2.7) | 0.052 (26, +3.2/-2.7) | 0.059 (25, +1.9/-1.8) | 0.051 (29, +1.6/-1.6) | 0.057 (21, +3.2/-3.0) | 0.08 (34, +3.5/-3.2) | 0.05 (27, +1.9/-1.7) | 0.073 (20, +2.4/-2.2) | 0.227 (25, +3.9/-3.9) | 0.0883 (32, +0.83/-0.81) |
| allenai/OLMo-7B-SFT | 0.012 (29, +0.8/-0.8) | 0.804 (5, +7.4/-7.8) | 0.134 (27, +4.5/-4.0) | 0.069 (26, +3.2/-2.7) | 0.067 (25, +2.0/-2.0) | 0.044 (29, +1.6/-1.5) | 0.015 (28, +1.5/-1.2) | 0.224 (20, +5.5/-5.7) | 0.047 (27, +1.9/-1.7) | 0.024 (26, +1.5/-1.3) | 0.5 (8, +4.5/-4.5) | 0.1211 (29, +0.99/-0.99) |
| microsoft/Phi-3-medium-4k-instruct | 0.277 (16, +3.7/-3.5) | 0.814 (5, +6.9/-7.8) | 0.349 (2, +5.9/-5.7) | 0.243 (7, +5.0/-5.0) | 0.263 (13, +3.5/-3.4) | 0.281 (12, +3.4/-3.3) | 0.24 (7, +5.9/-5.7) | 0.453 (1, +6.5/-6.5) | 0.372 (5, +3.8/-3.6) | 0.172 (17, +3.5/-3.3) | 0.602 (1, +4.5/-4.5) | 0.3328 (9, +1.39/-1.34) |
| Qwen/Qwen1.5-110B-Chat | 0.477 (1, +4.1/-3.9) | 0.804 (5, +7.8/-7.8) | 0.314 (2, +5.7/-5.9) | 0.304 (7, +5.9/-5.7) | 0.439 (1, +4.1/-4.0) | 0.39 (7, +3.6/-3.6) | 0.371 (1, +6.9/-6.7) | 0.495 (1, +6.5/-6.5) | 0.388 (5, +3.8/-3.8) | 0.517 (1, +4.7/-4.6) | 0.398 (18, +4.5/-4.5) | 0.4297 (3, +1.42/-1.42) |
| Qwen/Qwen2-72B-Instruct | 0.424 (10, +3.8/-3.8) | 0.716 (5, +8.8/-8.8) | 0.248 (14, +5.5/-5.2) | 0.324 (7, +5.4/-5.4) | 0.317 (13, +3.6/-3.7) | 0.291 (12, +3.4/-3.4) | 0.153 (15, +5.0/-4.7) | 0.4 (9, +6.2/-6.5) | 0.384 (5, +3.8/-3.6) | 0.264 (14, +4.2/-4.0) | 0.472 (8, +4.5/-4.5) | 0.35 (9, +1.39/-1.37) |
| Qwen/Qwen2.5-72B-Instruct | 0.514 (1, +3.8/-4.0) | 0.716 (5, +8.3/-8.8) | 0.302 (2, +5.7/-5.7) | 0.408 (1, +5.9/-5.9) | 0.477 (1, +4.1/-3.9) | 0.46 (1, +3.8/-3.6) | 0.364 (1, +6.2/-6.7) | 0.435 (1, +6.0/-6.2) | 0.456 (1, +3.9/-3.8) | 0.575 (1, +4.7/-4.5) | 0.234 (25, +3.9/-3.9) | 0.4476 (3, +1.47/-1.46) |
| WizardLMTeam/WizardLM-13B-V1.2 | 0.169 (19, +3.0/-2.8) | 0.632 (21, +9.3/-9.8) | 0.228 (14, +5.4/-5.0) | 0.171 (14, +5.0/-4.5) | 0.158 (19, +3.0/-2.8) | 0.147 (19, +2.6/-2.6) | 0.077 (21, +3.5/-3.2) | 0.336 (9, +6.2/-6.2) | 0.097 (21, +2.4/-2.3) | 0.113 (20, +3.1/-2.9) | 0.439 (8, +4.5/-4.5) | 0.1956 (20, +1.17/-1.17) |
| 01-ai/Yi-1.5-34B-Chat | 0.497 (1, +3.9/-3.9) | 0.716 (5, +7.8/-8.3) | 0.252 (14, +5.4/-5.2) | 0.27 (7, +5.4/-5.2) | 0.362 (7, +4.0/-3.8) | 0.301 (12, +3.3/-3.4) | 0.215 (7, +5.7/-5.2) | 0.358 (9, +6.2/-6.0) | 0.318 (11, +3.6/-3.6) | 0.411 (7, +4.6/-4.4) | 0.288 (25, +4.1/-4.1) | 0.3542 (9, +1.40/-1.37) |
| databricks/dolly-v2-12b | 0.003 (29, +0.5/-0.3) | 0.755 (5, +7.8/-8.8) | 0.124 (27, +4.7/-4.5) | 0.074 (26, +3.5/-3.2) | 0.028 (32, +1.3/-1.3) | 0.02 (35, +1.1/-1.0) | 0 (34, +0.0/-0.0) | 0.067 (34, +3.5/-3.2) | 0.033 (27, +1.6/-1.4) | 0.009 (31, +0.9/-0.7) | 0.396 (18, +4.5/-4.3) | 0.0864 (32, +0.86/-0.82) |
| databricks/dolly-v2-7b | 0.002 (36, +0.3/-0.2) | 0.706 (5, +8.8/-9.8) | 0.094 (27, +4.0/-3.7) | 0.05 (26, +3.0/-2.5) | 0.02 (36, +1.2/-1.0) | 0.013 (35, +0.9/-0.7) | 0 (34, +0.0/-0.0) | 0.062 (34, +3.5/-3.0) | 0.036 (27, +1.7/-1.5) | 0.002 (37, +0.4/-0.2) | 0.42 (8, +4.5/-4.5) | 0.0822 (32, +0.81/-0.81) |
| nomic-ai/gpt4all-13b-snoozy | 0.007 (29, +0.7/-0.5) | 0.902 (1, +5.4/-5.9) | 0.037 (34, +2.5/-2.2) | 0.062 (26, +3.2/-3.0) | 0.064 (25, +2.0/-1.9) | 0.057 (29, +1.9/-1.7) | 0.01 (28, +1.5/-1.0) | 0.117 (30, +4.5/-4.2) | 0.051 (27, +2.0/-1.8) | 0.013 (31, +1.1/-0.9) | 0.359 (18, +4.3/-4.3) | 0.0985 (32, +0.92/-0.88) |
| TheBloke/koala-13B-HF | 0.01 (29, +0.8/-0.7) | 0.701 (5, +8.3/-9.3) | 0.158 (27, +5.0/-4.7) | 0.084 (26, +3.5/-3.0) | 0.043 (32, +1.7/-1.5) | 0.05 (29, +1.7/-1.6) | 0.01 (28, +1.5/-1.0) | 0.164 (30, +5.2/-4.7) | 0.052 (27, +1.9/-1.9) | 0.022 (31, +1.3/-1.3) | 0.398 (18, +4.3/-4.1) | 0.1044 (29, +0.92/-0.89) |
| TheBloke/koala-7B-HF | 0.004 (29, +0.6/-0.4) | 0.657 (5, +8.8/-9.8) | 0.082 (27, +3.5/-3.2) | 0.079 (26, +3.5/-3.2) | 0.033 (32, +1.5/-1.3) | 0.034 (29, +1.4/-1.3) | 0 (34, +0.0/-0.0) | 0.097 (30, +4.2/-3.7) | 0.031 (36, +1.5/-1.4) | 0.013 (31, +1.1/-0.9) | 0.387 (18, +4.3/-4.3) | 0.0861 (32, +0.83/-0.82) |
| mosaicml/mpt-7b-chat | 0.005 (29, +0.7/-0.5) | 0.73 (5, +8.3/-8.8) | 0.121 (27, +4.2/-4.0) | 0.037 (36, +2.5/-2.0) | 0.05 (32, +1.8/-1.7) | 0.039 (29, +1.4/-1.4) | 0.015 (28, +1.7/-1.2) | 0.211 (20, +5.7/-5.5) | 0.041 (27, +1.8/-1.6) | 0.013 (31, +1.1/-0.9) | 0.474 (8, +4.8/-4.5) | 0.1078 (29, +0.92/-0.89) |
| OpenAssistant/oasst-sft-1-pythia-12b | 0.002 (36, +0.3/-0.2) | 0.623 (21, +9.3/-9.3) | 0.02 (34, +2.0/-1.5) | 0.015 (36, +1.7/-1.2) | 0.017 (36, +1.2/-1.0) | 0.006 (35, +0.6/-0.4) | 0 (34, +0.0/-0.0) | 0.032 (34, +2.5/-2.2) | 0.024 (36, +1.3/-1.1) | 0.009 (31, +0.9/-0.7) | 0.232 (25, +3.9/-3.7) | 0.0507 (37, +0.67/-0.62) |
| allenai/tulu-2-dpo-13b | 0.093 (23, +2.3/-2.3) | 0.686 (5, +8.8/-8.8) | 0.178 (14, +5.0/-5.0) | 0.136 (14, +4.5/-4.2) | 0.133 (19, +2.8/-2.7) | 0.156 (19, +2.7/-2.6) | 0.092 (21, +4.0/-3.7) | 0.249 (20, +5.7/-5.5) | 0.109 (21, +2.5/-2.4) | 0.102 (20, +2.9/-2.6) | 0.502 (8, +4.5/-4.5) | 0.184 (20, +1.10/-1.14) |
| allenai/tulu-2-dpo-70b | 0.175 (19, +2.9/-2.8) | 0.608 (21, +9.8/-9.8) | 0.265 (14, +5.7/-5.4) | 0.21 (14, +5.2/-5.0) | 0.229 (16, +3.2/-3.3) | 0.215 (16, +3.1/-3.1) | 0.119 (15, +4.7/-4.2) | 0.308 (9, +6.0/-5.7) | 0.232 (16, +3.5/-3.3) | 0.181 (17, +3.5/-3.3) | 0.556 (1, +4.8/-4.8) | 0.2583 (15, +1.29/-1.29) |
| allenai/tulu-2-dpo-7b | 0.049 (25, +1.8/-1.6) | 0.559 (21, +9.8/-9.8) | 0.146 (27, +4.7/-4.2) | 0.099 (26, +4.0/-3.5) | 0.1 (25, +2.5/-2.3) | 0.099 (26, +2.2/-2.1) | 0.069 (21, +3.5/-3.2) | 0.184 (20, +5.2/-5.0) | 0.054 (27, +1.9/-1.8) | 0.06 (26, +2.2/-2.0) | 0.465 (8, +4.5/-4.5) | 0.138 (27, +1.00/-1.00) |
| allenai/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm | 0.377 (10, +3.9/-3.8) | 0.216 (37, +7.8/-7.8) | 0.064 (34, +3.5/-3.0) | 0.109 (26, +4.2/-3.7) | 0.164 (19, +3.0/-2.8) | 0.155 (19, +2.8/-2.6) | 0.161 (15, +5.2/-4.7) | 0.124 (30, +4.5/-4.5) | 0.117 (21, +2.6/-2.6) | 0.219 (14, +3.8/-3.8) | 0.214 (25, +3.7/-3.9) | 0.1898 (20, +1.19/-1.15) |
| lmsys/vicuna-13b-v1.5 | 0.041 (25, +1.7/-1.6) | 0.814 (5, +6.9/-7.8) | 0.265 (14, +5.4/-5.4) | 0.139 (14, +4.5/-4.0) | 0.102 (25, +2.5/-2.3) | 0.119 (19, +2.4/-2.4) | 0.079 (21, +4.0/-3.2) | 0.256 (20, +6.0/-5.7) | 0.106 (21, +2.6/-2.5) | 0.055 (26, +2.2/-2.0) | 0.558 (1, +4.5/-4.8) | 0.1739 (20, +1.12/-1.07) |
| lmsys/vicuna-7b-v1.5 | 0.029 (25, +1.4/-1.3) | 0.755 (5, +7.8/-8.8) | 0.203 (14, +5.4/-5.0) | 0.106 (26, +4.0/-3.5) | 0.082 (25, +2.2/-2.2) | 0.07 (26, +2.0/-1.9) | 0.027 (28, +2.2/-2.0) | 0.241 (20, +5.7/-5.7) | 0.067 (27, +2.1/-2.1) | 0.042 (26, +2.0/-1.8) | 0.528 (1, +4.5/-4.3) | 0.1428 (27, +1.07/-1.02) |
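The `*_confi` columns are fixed-width strings such as `+3.0 / -2.8` rather than numeric pairs. A small sketch of turning one into a `(plus, minus)` tuple; the helper name is ours, not part of the dataset:

```python
import re

def parse_confi(s: str) -> tuple[float, float]:
    """Split a string like '+3.0 / -2.8' into (upper, lower) magnitudes."""
    m = re.fullmatch(r"\+([\d.]+)\s*/\s*-([\d.]+)", s.strip())
    if m is None:
        raise ValueError(f"unexpected interval format: {s!r}")
    return float(m.group(1)), float(m.group(2))

print(parse_confi("+3.0 / -2.8"))  # -> (3.0, 2.8)
```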
A second block of rows follows, giving different values for many of the same models (presumably a separate evaluation split or configuration), in the same `score (rank, +upper/-lower)` cell format.

| path | brainstorm | open_qa | closed_qa | extract | generation | rewrite | summarize | classify | reasoning_over_numerical_data | multi-document_synthesis | fact_checking_or_attributed_qa | average |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| meta-llama/Llama-2-13b-chat-hf | 0.21 (13, +3.3/-3.3) | 0.593 (19, +9.3/-9.3) | 0.176 (14, +4.7/-4.5) | 0.134 (12, +4.2/-4.0) | 0.167 (13, +3.0/-3.0) | 0.136 (13, +2.6/-2.5) | 0.188 (5, +5.4/-5.0) | 0.323 (5, +6.0/-5.7) | 0.122 (13, +2.6/-2.4) | 0.121 (15, +3.1/-2.9) | 0.409 (9, +4.5/-4.3) | 0.2013 (13, +1.22/-1.20) |
| meta-llama/Llama-2-70b-chat-hf | 0.218 (13, +3.2/-3.3) | 0.662 (1, +8.3/-9.3) | 0.312 (1, +5.7/-5.7) | 0.173 (5, +5.0/-4.5) | 0.197 (9, +3.3/-3.1) | 0.183 (9, +2.9/-2.9) | 0.153 (10, +5.2/-4.7) | 0.343 (5, +6.0/-6.0) | 0.172 (10, +3.1/-3.0) | 0.166 (12, +3.5/-3.3) | 0.488 (1, +4.7/-4.7) | 0.2431 (10, +1.28/-1.26) |
| meta-llama/Llama-2-7b-chat-hf | 0.171 (13, +3.0/-2.8) | 0.475 (19, +9.3/-9.3) | 0.2 (6, +4.7/-5.0) | 0.119 (12, +4.2/-4.0) | 0.152 (13, +2.9/-2.7) | 0.131 (13, +2.6/-2.5) | 0.121 (10, +4.5/-4.2) | 0.187 (13, +5.0/-4.7) | 0.088 (13, +2.3/-2.2) | 0.106 (15, +2.9/-2.9) | 0.355 (9, +4.5/-4.3) | 0.169 (15, +1.13/-1.05) |
| meta-llama/Llama-3.1-70B-Instruct | 0.437 (5, +3.9/-3.9) | 0.77 (1, +7.8/-8.3) | 0.408 (1, +5.9/-5.9) | 0.369 (1, +5.7/-5.7) | 0.431 (3, +3.8/-3.9) | 0.424 (3, +3.6/-3.4) | 0.443 (1, +6.9/-6.4) | 0.532 (1, +5.7/-6.0) | 0.458 (1, +3.8/-3.6) | 0.481 (4, +4.6/-4.6) | 0.498 (1, +4.5/-4.6) | 0.456 (1, +1.48/-1.41) |
| meta-llama/Llama-3.1-8B-Instruct | 0.402 (5, +3.9/-3.9) | 0.652 (1, +9.3/-9.3) | 0.304 (1, +5.9/-5.7) | 0.255 (5, +5.4/-5.2) | 0.324 (7, +3.9/-3.7) | 0.301 (5, +3.4/-3.3) | 0.3 (5, +6.4/-5.9) | 0.378 (5, +6.5/-6.0) | 0.235 (8, +3.4/-3.3) | 0.411 (4, +4.6/-4.6) | 0.316 (15, +4.3/-4.1) | 0.3334 (8, +1.40/-1.40) |
| mistralai/Mistral-7B-Instruct-v0.3 | 0.298 (9, +3.8/-3.5) | 0.618 (1, +8.8/-9.8) | 0.196 (6, +5.2/-5.0) | 0.134 (12, +4.7/-4.2) | 0.194 (9, +3.2/-3.1) | 0.167 (9, +2.8/-2.9) | 0.168 (10, +5.4/-5.0) | 0.226 (13, +5.7/-5.5) | 0.18 (10, +3.3/-3.0) | 0.285 (9, +4.2/-4.0) | 0.403 (9, +4.5/-4.3) | 0.2425 (10, +1.27/-1.28) |
| mistralai/Mistral-Large-Instruct-2407 | 0.548 (1, +3.8/-4.0) | 0.618 (1, +9.8/-9.8) | 0.287 (6, +5.9/-5.7) | 0.347 (1, +5.9/-5.9) | 0.522 (1, +3.9/-4.0) | 0.499 (1, +3.6/-3.7) | 0.411 (1, +6.4/-6.7) | 0.458 (1, +6.0/-6.0) | 0.461 (1, +3.7/-3.7) | 0.592 (1, +4.4/-4.6) | 0.24 (15, +3.9/-3.7) | 0.4656 (1, +1.46/-1.46) |
| mistralai/Mistral-Small-Instruct-2409 | 0.492 (1, +4.0/-3.9) | 0.603 (1, +9.3/-9.8) | 0.302 (1, +5.7/-5.4) | 0.235 (5, +5.4/-5.2) | 0.435 (3, +4.0/-3.8) | 0.408 (3, +3.6/-3.6) | 0.366 (1, +6.7/-6.4) | 0.371 (5, +6.2/-6.2) | 0.407 (1, +3.8/-3.7) | 0.564 (1, +4.4/-4.5) | 0.225 (22, +3.9/-3.7) | 0.4079 (4, +1.43/-1.43) |
| allenai/OLMo-7B-SFT | 0.065 (20, +2.0/-1.8) | 0.176 (26, +7.8/-6.9) | 0.04 (21, +2.7/-2.2) | 0.037 (21, +2.5/-2.2) | 0.04 (21, +1.7/-1.5) | 0.033 (21, +1.4/-1.3) | 0.05 (19, +3.0/-3.0) | 0.08 (21, +3.7/-3.2) | 0.05 (19, +1.9/-1.8) | 0.073 (15, +2.4/-2.2) | 0.173 (22, +3.5/-3.5) | 0.0669 (22, +0.76/-0.74) |
| allenai/OLMo-7B-SFT | 0.012 (22, +0.8/-0.8) | 0.721 (1, +8.3/-8.8) | 0.139 (14, +4.7/-4.5) | 0.045 (21, +2.7/-2.2) | 0.048 (21, +1.8/-1.7) | 0.024 (21, +1.1/-1.1) | 0.012 (22, +1.7/-1.2) | 0.177 (13, +5.5/-5.2) | 0.038 (19, +1.7/-1.5) | 0.029 (21, +1.5/-1.5) | 0.439 (1, +4.5/-4.3) | 0.1029 (21, +0.94/-0.88) |
| microsoft/Phi-3-medium-4k-instruct | 0.32 (9, +3.7/-3.7) | 0.75 (1, +8.3/-8.8) | 0.275 (6, +5.9/-5.4) | 0.163 (5, +4.7/-4.5) | 0.25 (9, +3.5/-3.3) | 0.208 (9, +3.0/-2.9) | 0.114 (10, +4.5/-4.2) | 0.386 (5, +6.5/-6.2) | 0.34 (6, +3.8/-3.6) | 0.212 (9, +3.8/-3.5) | 0.489 (1, +4.5/-4.8) | 0.2951 (9, +1.34/-1.32) |
| Qwen/Qwen1.5-110B-Chat | 0.452 (5, +3.9/-3.9) | 0.775 (1, +7.8/-7.8) | 0.28 (6, +6.2/-5.7) | 0.255 (5, +5.7/-5.4) | 0.367 (3, +3.8/-3.8) | 0.321 (5, +3.5/-3.4) | 0.3 (5, +6.2/-6.2) | 0.413 (1, +6.5/-6.5) | 0.375 (6, +3.8/-3.8) | 0.49 (4, +4.6/-4.6) | 0.346 (9, +4.3/-4.3) | 0.3827 (4, +1.43/-1.43) |
| Qwen/Qwen2-72B-Instruct | 0.454 (5, +4.1/-4.0) | 0.74 (1, +7.8/-8.3) | 0.257 (6, +5.7/-5.9) | 0.275 (1, +5.9/-5.7) | 0.34 (7, +3.8/-3.7) | 0.326 (5, +3.4/-3.4) | 0.22 (5, +5.7/-5.4) | 0.393 (5, +6.2/-6.5) | 0.398 (1, +3.9/-3.6) | 0.342 (7, +4.4/-4.2) | 0.387 (9, +4.5/-4.3) | 0.3659 (6, +1.43/-1.41) |
| Qwen/Qwen2.5-72B-Instruct | 0.523 (1, +3.8/-3.9) | 0.642 (1, +9.3/-9.8) | 0.292 (1, +5.7/-5.9) | 0.334 (1, +6.2/-6.2) | 0.47 (1, +4.0/-4.0) | 0.453 (1, +3.6/-3.6) | 0.364 (1, +6.4/-6.4) | 0.42 (1, +6.2/-6.2) | 0.454 (1, +3.7/-3.8) | 0.574 (1, +4.6/-4.4) | 0.225 (22, +3.9/-3.7) | 0.4391 (1, +1.43/-1.51) |
| WizardLMTeam/WizardLM-13B-V1.2 | 0.189 (13, +3.3/-3.1) | 0.5 (19, +9.8/-9.8) | 0.119 (14, +4.5/-4.0) | 0.084 (12, +3.7/-3.2) | 0.165 (13, +3.2/-3.0) | 0.137 (13, +2.5/-2.4) | 0.072 (19, +3.7/-3.2) | 0.216 (13, +5.5/-5.2) | 0.078 (13, +2.1/-2.1) | 0.104 (15, +2.9/-2.9) | 0.307 (15, +4.1/-4.1) | 0.1618 (15, +1.08/-1.06) |
| 01-ai/Yi-1.5-34B-Chat | 0.509 (1, +4.0/-4.0) | 0.593 (19, +8.8/-9.8) | 0.243 (6, +5.4/-5.4) | 0.193 (5, +5.2/-5.0) | 0.361 (3, +3.8/-3.8) | 0.304 (5, +3.4/-3.3) | 0.208 (5, +5.7/-5.4) | 0.291 (5, +6.0/-5.7) | 0.291 (8, +3.6/-3.5) | 0.386 (7, +4.6/-4.3) | 0.268 (15, +4.1/-4.1) | 0.3377 (6, +1.42/-1.36) |
| databricks/dolly-v2-7b | 0 (28, +0.0/-0.0) | 0.029 (29, +3.9/-2.9) | 0 (28, +0.0/-0.0) | 0 (28, +0.0/-0.0) | 0.007 (27, +0.7/-0.5) | 0.001 (27, +0.3/-0.1) | 0 (25, +0.0/-0.0) | 0.005 (29, +1.0/-0.5) | 0.007 (26, +0.7/-0.6) | 0.009 (23, +0.9/-0.7) | 0.017 (28, +1.3/-1.1) | 0.0059 (28, +0.23/-0.23) |
| databricks/dolly-v2-12b | 0 (28, +0.0/-0.0) | 0.059 (26, +4.9/-3.9) | 0 (28, +0.0/-0.0) | 0 (28, +0.0/-0.0) | 0.008 (27, +0.8/-0.7) | 0.001 (27, +0.3/-0.1) | 0 (25, +0.0/-0.0) | 0.015 (25, +2.0/-1.5) | 0.02 (26, +1.2/-1.0) | 0.009 (23, +0.9/-0.7) | 0.022 (28, +1.3/-1.3) | 0.0093 (28, +0.31/-0.27) |
| nomic-ai/gpt4all-13b-snoozy | 0.008 (22, +0.8/-0.6) | 0.77 (1, +7.8/-8.3) | 0.015 (26, +1.7/-1.2) | 0.022 (21, +2.0/-1.7) | 0.043 (21, +1.7/-1.5) | 0.029 (21, +1.3/-1.1) | 0.002 (22, +0.5/-0.2) | 0.045 (25, +3.0/-2.5) | 0.043 (19, +1.7/-1.7) | 0.015 (23, +1.1/-1.1) | 0.21 (22, +3.7/-3.7) | 0.0641 (25, +0.74/-0.72) |
| TheBloke/koala-13B-HF | 0.009 (22, +0.8/-0.7) | 0.49 (19, +9.8/-9.8) | 0.064 (21, +3.2/-3.0) | 0.027 (21, +2.2/-1.7) | 0.045 (21, +1.7/-1.5) | 0.031 (21, +1.3/-1.1) | 0.012 (22, +1.5/-1.0) | 0.1 (21, +4.0/-3.7) | 0.032 (19, +1.5/-1.3) | 0.024 (23, +1.5/-1.3) | 0.312 (15, +4.5/-4.3) | 0.0746 (22, +0.82/-0.75) |
| TheBloke/koala-7B-HF | 0.007 (22, +0.7/-0.5) | 0.598 (1, +9.8/-9.8) | 0.015 (26, +1.7/-1.2) | 0.022 (21, +2.0/-1.7) | 0.027 (21, +1.3/-1.2) | 0.021 (21, +1.1/-1.0) | 0 (25, +0.0/-0.0) | 0.04 (25, +2.7/-2.5) | 0.028 (26, +1.5/-1.3) | 0.018 (23, +1.3/-1.1) | 0.268 (15, +4.1/-3.9) | 0.0607 (25, +0.70/-0.70) |
| mosaicml/mpt-7b-chat | 0.007 (22, +0.7/-0.5) | 0.696 (1, +8.8/-9.8) | 0.062 (21, +3.2/-3.0) | 0.02 (21, +2.0/-1.5) | 0.04 (21, +1.7/-1.5) | 0.034 (21, +1.4/-1.3) | 0 (25, +0.0/-0.0) | 0.124 (21, +5.0/-4.2) | 0.037 (19, +1.7/-1.5) | 0.013 (23, +1.1/-0.9) | 0.32 (15, +4.3/-4.3) | 0.0795 (22, +0.82/-0.76) |
| OpenAssistant/oasst-sft-1-pythia-12b | 0.003 (22, +0.5/-0.3) | 0.618 (1, +9.8/-9.8) | 0.022 (21, +2.2/-1.7) | 0.015 (21, +2.0/-1.5) | 0.015 (27, +1.0/-0.8) | 0.009 (27, +0.7/-0.6) | 0 (25, +0.0/-0.0) | 0.042 (25, +3.0/-2.5) | 0.028 (26, +1.5/-1.3) | 0.009 (23, +0.9/-0.7) | 0.206 (22, +3.7/-3.5) | 0.0493 (27, +0.67/-0.65) |
| allenai/OLMo-7B-SFT | 0.148 (17, +2.8/-2.8) | 0.529 (19, +9.8/-8.8) | 0.161 (14, +4.7/-4.7) | 0.104 (12, +4.0/-3.7) | 0.148 (13, +3.0/-2.8) | 0.146 (13, +2.6/-2.5) | 0.084 (10, +4.0/-3.5) | 0.206 (13, +5.5/-5.0) | 0.099 (13, +2.4/-2.4) | 0.135 (12, +3.1/-3.1) | 0.437 (1, +4.5/-4.3) | 0.179 (13, +1.12/-1.14) |
| allenai/tulu-2-dpo-70b | 0.255 (9, +3.4/-3.5) | 0.608 (1, +8.8/-9.8) | 0.205 (6, +5.2/-5.0) | 0.183 (5, +5.2/-4.7) | 0.236 (9, +3.4/-3.3) | 0.224 (9, +3.1/-3.1) | 0.141 (10, +5.0/-4.5) | 0.299 (5, +6.2/-6.0) | 0.189 (10, +3.2/-3.1) | 0.202 (12, +3.8/-3.8) | 0.481 (1, +4.3/-4.5) | 0.2572 (10, +1.28/-1.27) |
| allenai/tulu-2-dpo-13b | 0.104 (17, +2.5/-2.3) | 0.51 (19, +9.8/-9.8) | 0.099 (14, +4.0/-3.5) | 0.064 (12, +3.2/-3.0) | 0.105 (19, +2.5/-2.3) | 0.094 (19, +2.3/-2.1) | 0.082 (10, +4.0/-3.5) | 0.154 (13, +5.2/-4.7) | 0.063 (19, +2.1/-1.9) | 0.076 (15, +2.5/-2.3) | 0.372 (9, +4.3/-4.1) | 0.1325 (19, +1.02/-1.01) |
| allenai/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm | 0.301 (9, +3.7/-3.7) | 0.137 (26, +6.9/-6.9) | 0.062 (21, +3.0/-3.0) | 0.089 (12, +4.0/-3.5) | 0.141 (13, +2.7/-2.8) | 0.105 (13, +2.3/-2.1) | 0.116 (10, +4.2/-4.2) | 0.082 (21, +3.7/-3.2) | 0.093 (13, +2.5/-2.3) | 0.263 (9, +4.2/-4.0) | 0.169 (22, +3.5/-3.2) | 0.1571 (15, +1.12/-1.07) |
| lmsys/vicuna-7b-v1.5 | 0.101 (17, +2.5/-2.4) | 0.77 (1, +7.8/-8.3) | 0.171 (14, +4.7/-4.7) | 0.094 (12, +4.0/-3.5) | 0.117 (13, +2.7/-2.5) | 0.11 (13, +2.4/-2.3) | 0.094 (10, +4.2/-3.7) | 0.236 (13, +5.7/-5.7) | 0.085 (13, +2.3/-2.1) | 0.077 (15, +2.4/-2.4) | 0.511 (1, +4.8/-4.5) | 0.1697 (15, +1.13/-1.12) |
| lmsys/vicuna-7b-v1.5 | 0.055 (20, +1.8/-1.8) | 0.696 (1, +8.8/-8.8) | 0.131 (14, +4.5/-4.2) | 0.077 (12, +3.7/-3.2) | 0.09 (19, +2.3/-2.2) | 0.081 (19, +2.1/-1.9) | 0.042 (19, +2.7/-2.5) | 0.172 (13, +5.5/-5.0) | 0.058 (19, +2.1/-1.9) | 0.064 (21, +2.2/-2.2) | 0.468 (1, +4.5/-4.5) | 0.1352 (19, +1.05/-1.01) |
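Given a loaded split, a leaderboard-style view can be rebuilt by sorting on `average` and cross-checking the precomputed `average_rank`. A sketch continuing from the loading snippet above; the split choice here is arbitrary:

```python
# Sketch: print the top models of one split by average score, together
# with the precomputed rank and confidence-interval columns.
split = next(iter(ds.values()))  # arbitrary: first split in the dict
for row in sorted(split, key=lambda r: r["average"], reverse=True)[:5]:
    print(f'{row["path"]:<45} avg={row["average"]:.4f} '
          f'rank={row["average_rank"]} ci={row["average_confi"]}')
```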
