Each record has the following fields:

| field | type | notes |
|---|---|---|
| `model` | string (4–89 chars) | model identifier (`org/name` repo id) |
| `revision` | string (1 distinct value) | model revision; `main` for every record |
| `model_sha` | string (0–40 chars) | commit SHA of the evaluated model revision; may be empty |
| `results` | dict | per-benchmark scores: `arc:challenge`, `hellaswag`, `hendrycksTest`, `truthfulqa:mc` |
| `commit` | string (40 chars) | commit SHA attached to the result record; identical for all records below |
| `date` | unknown (ISO-8601 timestamp strings) | identical for all records below |
| `score` | float64 (21.8–83) | aggregate score over the four benchmarks |
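For every fully populated record below, the aggregate `score` equals the unweighted mean of the four benchmark values in `results`, rounded to one decimal place. A minimal sketch of that recomputation (plain Python; the `aggregate_score` helper is illustrative and not part of the dataset):

```python
def aggregate_score(results: dict[str, float]) -> float:
    """Unweighted mean of the benchmark scores, rounded to one decimal place."""
    return round(sum(results.values()) / len(results), 1)

# Example taken from the first record in the table below.
example = {"arc:challenge": 23.6, "hellaswag": 31.7,
           "hendrycksTest": 23.2, "truthfulqa:mc": 41.9}
print(aggregate_score(example))  # 30.1, matching RWKV/rwkv-4-169m-pile
```

Records with `null` benchmark values are handled less uniformly in the data (compare `gpt2`, whose score equals its single non-null result, with `Andron00e/YetAnother_Open-Llama-3B-LoRA`, whose score is `null`), so the sketch covers only the fully populated case.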
All 100 records below share `revision` = `main`, `commit` = `9ba100d35ce48d3d4c132947464c93c861932caa`, and `date` = `"2023-11-23T17:28:23"`, so those columns are stated once here and omitted from the table. The `results` dict is expanded into one column per benchmark.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| RWKV/rwkv-4-169m-pile | 46bdc280eb97b6141d5d51a935e0c4870ecaefcc | 23.6 | 31.7 | 23.2 | 41.9 | 30.1 |
| RWKV/rwkv-4-430m-pile | a4f6ec80438d4262d1bbc8f385feb2ef1a4a9d6b | 26.7 | 40 | 24.9 | 39.6 | 32.8 |
| RWKV/rwkv-4-7b-pile | 922e22a761427e50d7be457b31a76b1126021b8b | 39.7 | 66.3 | 25 | 33.7 | 41.2 |
| RWKV/rwkv-raven-14b | 359c0649b4f1d10a26ebea32908035bc00d152ee | 44.6 | 71.3 | 25.9 | 41.9 | 45.9 |
| RWKV/rwkv-raven-1b5 | 571a3bd891ce33f2ee3fc6de09218178edb0dae2 | 31.8 | 52.6 | 26 | 37.1 | 36.9 |
| jaspercatapang/Echidna-30B | 20b13b6676d54b555ae2b9b2b4b6fc8a0c7c2e89 | 28.5 | 25.5 | 24.9 | 48.1 | 31.8 |
| CoolWP/llama-2-13b-guanaco-fp16 | a60e8e39e4fbe271655e1c78eb1ceb2081518231 | 59.6 | 82.4 | 55.5 | 43.4 | 60.2 |
| TigerResearch/tigerbot-7b-sft | 98b847905d63f74624e834db1ff95ee2814cbbd3 | 41.6 | 60.6 | 29.9 | 58.2 | 47.6 |
| TigerResearch/tigerbot-7b-base | 300831494aa1eb16e59799310a09531f60dcc904 | 47.7 | 72.1 | 45.1 | 42.3 | 51.8 |
| MayaPH/FinOPT-Washington | cdd8a6cde7902de39757cf31d73af1f51df0d8e8 | 25.2 | 26.2 | 24.8 | 45.8 | 30.5 |
| MayaPH/GodziLLa-30B | aa9912a2ac60abeac28b4566731cd903dcc582ac | 61.5 | 82.1 | 54.2 | 55.9 | 63.4 |
| MayaPH/GodziLLa-30B-instruct | 642bf3683801e20e4b7cf28d94374d5e6054c007 | 29 | 26.5 | 24.9 | 48.8 | 32.3 |
| MayaPH/GodziLLa-30B-plus | a66b1860d11ebf8aed07237cf636fdd2b3a07f06 | 28.9 | 26.4 | 24.6 | 48.9 | 32.2 |
| MayaPH/opt-flan-iml-6.7b | cbe8d60db6f3c52e653ca73e23a1c34c08127d02 | 30.1 | 58.8 | 25.1 | 36.7 | 37.7 |
| MayaPH/GodziLLa2-70B | 7b78087db07eec97f7b461d10758ece76d685543 | 71.4 | 87.5 | 69.9 | 61.5 | 72.6 |
| abhishek/llama2guanacotest | 679d17809939a0bf9b79bbb027898cbea64045b2 | 51.6 | 77.6 | 48.5 | 43.9 | 55.4 |
| abhishek/autotrain-llama-alpaca-peft-52508123785 | | 52.2 | 76.9 | 37.6 | 32.9 | 49.9 |
| guardrail/llama-2-7b-guanaco-instruct-sharded | fc7a3abbc3b9a9b3e163ef3c4844307ac270fca7 | 53.8 | 78.7 | 46.7 | 43.9 | 55.8 |
| CobraMamba/mamba-gpt-3b-v2 | 935f4d90bd0fc7117113d3c7b6b6af9dba93183d | 42.2 | 71.5 | 27.1 | 36.7 | 44.4 |
| CobraMamba/mamba-gpt-7b | cb0b04b1bff7921614efbd87d5b87bac04c58d13 | 51.2 | 75.4 | 47.5 | 42.1 | 54 |
| CobraMamba/mamba-gpt-7b-v1 | e64d658b397748e409d9633fd24fc5a6df429600 | 61.3 | 84.1 | 63.5 | 46.3 | 63.8 |
| CobraMamba/mamba-gpt-3b-v3 | d860a90ef6b30c695b985dd2ff382d4bbb80e857 | 41.7 | 71.1 | 27.3 | 37.9 | 44.5 |
| CobraMamba/mamba-gpt-7b-v2 | 6439444e2c0b61253d3e61ae04fe0436717acc2f | 61.9 | 83.8 | 61.7 | 46.6 | 63.5 |
| blueapple8259/TinyStories-Alpaca | 18e0bde7e72e477757832f0624a0410efc066216 | 24 | 24.9 | 23.4 | 46.7 | 29.8 |
| DanielSc4/RedPajama-INCITE-Chat-3B-v1-FT-LoRA-8bit-test1 | f477d24b00e05fe4c5f8d5f933080994cfd90e4e | 38.7 | 63.5 | 25.2 | 36.1 | 40.9 |
| DanielSc4/RedPajama-INCITE-Chat-3B-v1-RL-LoRA-8bit-test1 | a2ee88a9fa1c9ad41e0a8c15217a4b1230ec33c8 | 41.3 | 66.8 | 26.1 | 35 | 42.3 |
| THUDM/chatglm2-6b | 162b620e3078b03eefff94eb5f762d4093425fb5 | 38.8 | 59 | 46.7 | 48.1 | 48.2 |
| Salesforce/codegen-6B-multi | 2d58b1e73791e8f0be7ea59c2720dccb6f4d0f06 | 27.2 | 41.1 | 25.7 | 45.7 | 34.9 |
| mrm8488/llama-2-coder-7b | f21c0d5e3f9f8c5addf093358e6885afa9602296 | 54 | 78.4 | 46.3 | 38.5 | 54.3 |
| mrm8488/mistral-7b-ft-h4-no_robots_instructions | 785446da9a53ceae48795069bf7ccaf46a91a5ba | 60.9 | 83.2 | 63.7 | 43.6 | 62.8 |
| Riiid/sheep-duck-llama-2 | e196dd0fe1d604c4975d972b177b09e4f1572cd5 | 72.4 | 87.8 | 70.8 | 63.8 | 73.7 |
| Riiid/sheep-duck-llama-2-70b-v1.1 | a8fc5c02c995733af6339ec882bef4ed93db1e8f | 73 | 87.8 | 70.8 | 64.6 | 74.1 |
| PSanni/Deer-3b | 53ea8f8862fc1820f0cd31f62953b7290fd79867 | 38.5 | 57.4 | 25.6 | 40 | 40.4 |
| w601sxs/b1ade-1b | b4b0fd71589e6590089e1ec14a840ecab10894ae | 28.6 | 46.1 | 25.1 | 41.3 | 35.3 |
| bofenghuang/vigogne-13b-chat | 27002e974774c3599e6a4d731dd44e68b9e41f92 | 58.6 | 80.9 | 47.8 | 48.7 | 59 |
| bofenghuang/vigogne-33b-instruct | 9c2b558b888e0ef8b4a72e0771db72a06a5c8474 | 63.1 | 85 | 58.3 | 52.1 | 64.6 |
| bofenghuang/vigostral-7b-chat | 969fbfc7a91f53c8562a2c48a3c24dd3745d5a97 | 62.6 | 84.3 | 63.5 | 49.2 | 64.9 |
| u-chom/preded-title-amazongoogle-abtbuy | ab36321d76775d6e276d157e27de23854d21be3a | 50.9 | 78.1 | 38 | 41.6 | 52.2 |
| yeen214/test_llama2_7b | 69a4886f51ed752216cdd7f41a584d14240126f9 | 53.1 | 78.6 | 46.9 | 38.8 | 54.4 |
| yeen214/llama2_7b_merge_orcafamily | fb65f697de632f2f3fef57fc3cd12fb5e4913a89 | 56.9 | 81.2 | 51.5 | 49.7 | 59.8 |
| DevaMalla/llama_7b_qlora_cds | b6b5c65c5c1cce34d24c8f790bb0cc011e0f0808 | 52.5 | 77.8 | 32.4 | 46.1 | 52.2 |
| DevaMalla/llama_7b_qlora_pds-eval | d20419e1d9e9a6a59ced3edf5169e8e7b3e8394c | 53.9 | 78.1 | 33 | 45.6 | 52.6 |
| DevaMalla/llama-base-7b | e01d89d8e444f7d751ea58feaf22ff8c9af69d2a | 50.9 | 77.8 | 35.7 | 34.3 | 49.7 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus-QLoRA-multigpu | f65029ea8f030731ace568e40bab33a7097a13de | 57.5 | 82.5 | 54.8 | 43.8 | 59.6 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus | c318a24121bd69509f395e17a9636093213ece21 | 58.9 | 82.1 | 55 | 42.8 | 59.7 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus-8bit-att | 83a8e51d0a72dcfbe5de13dc7ee10dc20e91602e | 57.5 | 82.1 | 54.6 | 42.2 | 59.1 |
| NewstaR/Koss-7B-chat | b1ab836d9ebf7029fafa07949b51d3838501d537 | 53.7 | 78.8 | 46.7 | 44 | 55.8 |
| NewstaR/Starlight-13B | cb9fced568b1abd881133c642c427aaa488f00cc | 59.3 | 82.2 | 55.7 | 37.4 | 58.6 |
| NewstaR/Morningstar-13b-hf | 2605b5b3b0ecba906ac26d39aab40f33c2ec81c9 | 59 | 81.9 | 54.6 | 44.1 | 59.9 |
| NewstaR/Starlight-7B | 1f7436c458ebc3d8d31b91091c1a7a48e942cd3b | 53.1 | 78.6 | 46.8 | 38.8 | 54.3 |
| Sao10K/Stheno-1.8-L2-13B | fe054ab749a69375285df40913a88bd40f1e2bf6 | 63.5 | 84.1 | 58.6 | 52.9 | 64.8 |
| Sao10K/Mythical-Destroyer-V2-L2-13B | cbc8b2e4a3beafc311b9e61f8fa9f7526a77c360 | 59.3 | 82.7 | 57.4 | 57.1 | 64.1 |
| Sao10K/Stheno-Inverted-1.2-L2-13B | 8d2e9087093eef1c9173e167beb40b9d034a4655 | 59.4 | 83 | 55.8 | 51.2 | 62.4 |
| Sao10K/Zephyrus-L1-33B | 679aae34440d576456b283070371b2a15dbb948b | 64.5 | 84.1 | 57.4 | 53.9 | 65 |
| Sao10K/SthenoWriter-L2-13B | a6d9e26ab765eb170cc0aa428ee5e25b08524657 | 62.3 | 83.3 | 56.1 | 44.7 | 61.6 |
| Sao10K/Chat-Stheno-L2-13B | 20419fdd5b4bdcbbf075223c33b396958c48a6cf | 58.4 | 81 | 54.8 | 43.3 | 59.4 |
| Sao10K/Euryale-1.3-L2-70B | 6e3ce78eb5346bf3a5ee88cd60c25dc0d73de639 | 70.8 | 87.9 | 70.4 | 59.9 | 72.2 |
| Sao10K/Mythical-Destroyer-L2-13B | 7c87376b201b1c30c4e12c0b7bc2f28f017ce7bc | 58.7 | 82 | 57.7 | 56.3 | 63.7 |
| Sao10K/BrainDerp2 | 948ee7af94a8b092807df4becfc0a8c1cd042878 | 60.9 | 81.9 | 58.9 | 57.2 | 64.7 |
| Sao10K/Euryale-L2-70B | 6589310a57ce5d9d6877f353f3d00cda8fa9101c | 68.9 | 87.1 | 68.8 | 54.5 | 69.8 |
| Sao10K/Stheno-Mix-L2-20B | 6f9dcdaae6ef9071effe63d2107abe8b9712345b | 57.8 | 79.6 | 52.5 | 51.8 | 60.4 |
| nomic-ai/gpt4all-j | c7244e40ac6f3a52caecc96cd45481caae57ae8c | 42 | 64.1 | 28.2 | 42.8 | 44.3 |
| tianyil1/denas-llama2 | b8aebc9157c0e427536aeac9132021fd66615702 | 53.9 | 77.8 | 45.5 | 45.2 | 55.6 |
| llm-agents/tora-70b-v1.0 | e95fd7daf017e7c414ec07ebef4ddf013c16f9a4 | 67.7 | 85.8 | 69.2 | 51.8 | 68.6 |
| rishiraj/bloom-560m-guanaco | 17b886fe53bdb4cea75a7f40da1e8e987124edef | 27.9 | 26.1 | 24.5 | 49.4 | 32 |
| ByteWave/Yi-8B-Llama | 4f3f4d73ff3962487d1c51702b02d795bf1f33a4 | 25.7 | 26.8 | 24.1 | 47.8 | 31.1 |
| codellama/CodeLlama-34b-hf | c778b02fdecd4663d2b0a42bfb340fd29969533b | 37.5 | 31.8 | 37.2 | 38.9 | 36.4 |
| codellama/CodeLlama-7b-Instruct-hf | 7affc442e639b8aa1c4b3e98a10a2f45a21b8b4f | 36.5 | 55.4 | 34.5 | 41.2 | 41.9 |
| habanoz/TinyLlama-1.1B-intermediate-step-715k-1.5T-lr-5-2.2epochs-oasst1-top1-instruct-V1 | 74cd9eba94e77832b3081689fc5c99c37c063790 | 31.5 | 54.4 | 25.5 | 42.3 | 38.4 |
| habanoz/TinyLlama-1.1B-intermediate-step-715k-1.5T-lr-5-3epochs-oasst1-top1-instruct-V1 | b1ec2a1e08eb790b9a32a43053316650921af943 | 31.4 | 54.2 | 25.4 | 42.5 | 38.4 |
| habanoz/TinyLlama-1.1B-intermediate-step-715k-1.5T-lr-5-4epochs-oasst1-top1-instruct-V1 | 7cd6d5ad10180127771e4326772eae3d40fa8445 | 31.1 | 54.3 | 25.4 | 41.7 | 38.1 |
| habanoz/tinyllama-oasst1-top1-instruct-full-lr1-5-v0.1 | e55b262cbd0ee52f7a4cbda136dbf1a027987c47 | 32.8 | 58.2 | 26 | 38.3 | 38.8 |
| Deci/DeciCoder-1b | af2ef45ef8cbe82eb7eb4074f260412bc14c7b11 | 21.2 | 31.1 | 24.3 | 47.1 | 30.9 |
| Andron00e/YetAnother_Open-Llama-3B-LoRA | 52c5cb0178831908ed0571f1750fcb0f0fb125f9 | 25.9 | 25.8 | 24.7 | null | null |
| Vmware/open-llama-7b-v2-open-instruct | b8fbe09571a71603ab517fe897a1281005060b62 | 39.8 | 70.3 | 35.2 | 39.5 | 46.2 |
| gpt2-medium | f65d4965d1221eff2bcf34f53a2ba12120e18f24 | 27 | 40.2 | 26.6 | 40.8 | 33.7 |
| TinyLlama/TinyLlama-1.1B-intermediate-step-955k-token-2T | f62ecb34ea0d4acea9d896040a4616a9538e2f36 | 30.3 | 54.8 | 26.5 | 36.1 | 36.9 |
| TinyLlama/TinyLlama-1.1B-Chat-v0.6 | bf9ae1c8bf026667e6f810768de259bb4a7f4777 | 31.7 | 55.8 | 26 | 34.7 | 37 |
| ai-forever/rugpt3large_based_on_gpt2 | 8201db0de8deb68f25e7309db04d163b71970494 | 22.6 | 32.8 | 24.9 | 43.4 | 30.9 |
| beomi/KoAlpaca-KoRWKV-6B | 427ee72c4350f26de1b287a0c07b842e7d168dbc | 23.5 | 31.6 | 24.9 | 39.8 | 30 |
| beomi/KoRWKV-6B | 541600070459baf0f1be9560181d5ceb77794085 | 22.1 | 32.2 | 24.7 | 39 | 29.5 |
| gpt2 | 11c5a3d5811f50298f278a704980280950aedb10 | 22.1 | null | null | null | 22.1 |
| pszemraj/pythia-31m-simplewiki-2048 | 95d47818055661250b55144c7d9beaf05dc126d8 | 22.2 | 25.6 | 23.1 | 49.4 | 30.1 |
| pszemraj/pythia-31m-goodwiki-deduped-2048-scratch | 01a3cd918dd7c233bc0c3c0c948a9a462a5359d1 | 23.1 | 25.7 | 23.1 | 51.3 | 30.8 |
| bavest/fin-llama-33b-merged | 17114520801da7b9599fe7a9fdf238915713a59b | 65 | 86.2 | 58.7 | 49.7 | 64.9 |
| totally-not-an-llm/EverythingLM-13b-V2-16k | 943f932ae1ae462389e6d2db5273158530749fff | 58.7 | 80.9 | 49.7 | 47.4 | 59.2 |
| totally-not-an-llm/EverythingLM-13b-V3-peft | 7a2eed5038addcf4fa3b8dd358b45eb96134e749 | 58.4 | 81 | 54.7 | 53 | 61.8 |
| totally-not-an-llm/EverythingLM-13b-16k | 8456a856a8b115b05e76a7d0d945853b10ac71e2 | 56.6 | 80.6 | 50.2 | 47.5 | 58.7 |
| totally-not-an-llm/PuddleJumper-13b | f3a8a475ff0c6ae37ac8ae0690980be11cac731a | 58.7 | 81.2 | 58.3 | 56.4 | 63.6 |
| totally-not-an-llm/EverythingLM-13b-V3-16k | 1de9244bfadb947f80872727f76790cbc76e7142 | 58.2 | 80.1 | 50.5 | 45.2 | 58.5 |
| dvruette/gpt-neox-20b-full-precision | 20b347273d90da7c2c9eb4c32d4173dba862a0d2 | 48.8 | 74.4 | 26.2 | 36.9 | 46.6 |
| dvruette/oasst-pythia-12b-6000-steps | e2ccc0ef8d1cc5ffc8b0e2e885f03ef50597ea8a | 45.4 | 69.7 | 26 | 39.8 | 45.2 |
| dvruette/oasst-pythia-6.9b-4000-steps | 0e201b6f344ac6382dda40d389e1c9144a87d027 | 41.6 | 64.2 | 26.3 | 40.4 | 43.1 |
| dvruette/oasst-llama-13b-1000-steps | d2cd599cc40db3370009f45d6caa7e486cb6d31f | 58.1 | 81.5 | 48.6 | 36 | 56 |
| dvruette/oasst-llama-13b-2-epochs | 0e3796192f7edf43968541b9454ea35da4a2b1c5 | 57.9 | 82.4 | 48.6 | 47.3 | 59 |
| dvruette/oasst-gpt-neox-20b-3000-steps | f0462a8b7908f61202d86e6a9a2996d8339363b5 | 46.4 | 72.1 | 26.2 | 35.5 | 45 |
| dvruette/llama-13b-pretrained | c28cc0cf5a1a1bf4de96b23d06b02129dca85eb9 | 56.3 | 79.3 | 47 | 48.4 | 57.8 |
| dvruette/oasst-pythia-12b-reference | c5a9b7fad884e6c45ce5d2ca551aa1c03db6865f | 43 | 67.9 | 28.3 | 36.6 | 44 |
| h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt-v2 | fdc6ff469295d0aaabec8948525b70d6688728ac | 36.4 | 61.4 | 25 | 37.6 | 40.1 |
| h2oai/h2ogpt-gm-oasst1-multilang-1024-20b | b3a6bf4250a037c09e451344e2a4e987011b79de | 47.4 | 72.6 | 26.4 | 34.4 | 45.2 |
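Since these records are mostly useful for ranking and comparison, here is a small sketch (plain Python, no external dependencies; the `records` list is abridged to three rows copied from the table above, and the `ranked` helper logic is illustrative rather than part of the dataset) that orders models by aggregate score while skipping `null` scores:

```python
# Illustrative only: rank records by aggregate score, skipping rows whose
# score is null (e.g. Andron00e/YetAnother_Open-Llama-3B-LoRA above).
records = [
    {"model": "Riiid/sheep-duck-llama-2-70b-v1.1", "score": 74.1},
    {"model": "MayaPH/GodziLLa2-70B", "score": 72.6},
    {"model": "Andron00e/YetAnother_Open-Llama-3B-LoRA", "score": None},
]

ranked = sorted(
    (r for r in records if r["score"] is not None),
    key=lambda r: r["score"],
    reverse=True,
)
for rank, r in enumerate(ranked, start=1):
    print(f"{rank}. {r['model']}: {r['score']}")
```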