Column schema:

| column    | type          | range / notes                     |
|-----------|---------------|-----------------------------------|
| model     | string        | length 4 to 89                    |
| revision  | string        | 1 distinct value                  |
| model_sha | string        | length 0 to 40                    |
| results   | dict          | per-benchmark scores              |
| commit    | string        | length 40                         |
| date      | timestamp[ns] |                                   |
| score     | float64       | 21.8 to 83                        |
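This layout matches a Hugging Face datasets preview, with the nested results dict holding the four benchmark scores per model. A minimal loading sketch under that assumption; the file name below is a placeholder, and pandas' json_normalize stands in for whatever loader the dataset actually ships with:

```python
import json

import pandas as pd

# Minimal sketch, assuming the rows below are available as a list of
# JSON records. "leaderboard_results.json" is a placeholder path, not
# the dataset's actual file name.
with open("leaderboard_results.json") as f:
    records = json.load(f)

# json_normalize flattens the nested `results` dict into one column per
# benchmark: results.arc:challenge, results.hellaswag, and so on.
df = pd.json_normalize(records)
df = df.sort_values("score", ascending=False)
print(df[["model", "score"]].head())
```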
Every row in this slice carries the same revision (main), commit (10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3), and date (2023-09-20T10:22:33), so those values are stated once here rather than repeated per row. The four benchmark columns below are the keys of each row's results dict.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| TheBloke/Kimiko-v2-13B-fp16 | 0fed305667508e50330e71a2d43e9cee5ea73783 | 61 | 83.3 | 55.2 | 40.7 | 60 |
| TheBloke/wizard-vicuna-13B-GPTQ | 936a51c0219744d7a9598d0c65a7d18e01660601 | 28.7 | 25.9 | 25.8 | 48.5 | 32.2 |
| TheBloke/CodeLlama-34B-Python-fp16 | 875f9d97fb6c9619d8867887dd1d80918ff0f593 | 38.1 | 34.8 | 32.9 | 43.6 | 37.4 |
| TheBloke/manticore-13b-chat-pyg-GPTQ | 923f27245d13058c9c1b3ab0eab6c6c93ffc162e | 57.8 | 81.1 | 47.6 | 47.8 | 58.6 |
| TheBloke/Airoboros-L2-70B-2.1-GPTQ | 23ed580cb77ebaee49ea11eb4538fd3ab3795b76 | 70.4 | 86.5 | 68.9 | 55.5 | 70.3 |
| TheBloke/Airoboros-L2-13B-2.1-GPTQ | d90d96e40b9359cb5c35e6b6c8f0eb24896e827b | 59 | 81.7 | 53.2 | 44.7 | 59.6 |
| TheBloke/gpt4-x-vicuna-13B-HF | a247577c882940e0c6b040fe8239d760c0d10d40 | 53.4 | 80.1 | 51.2 | 53.6 | 59.6 |
| TheBloke/Wizard-Vicuna-30B-Superhot-8K-fp16 | 062fe5409861d7386279fb534b435be39c88ceaf | 26.2 | 33 | 23.5 | 47.5 | 32.6 |
| TheBloke/Wizard-Vicuna-7B-Uncensored-HF | b802f1b4401d0b2242137160c20cc11b9ffd3a4c | 53.4 | 78.8 | 37.1 | 43.5 | 53.2 |
| TheBloke/Wizard-Vicuna-13B-Uncensored-HF | fff9ac7f0e2e7b340f2301f5f089d989fc03be67 | 59 | 81.9 | 47.9 | 51.7 | 60.1 |
| TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ | 56a82ece7a9309189561a590e8f4d2fe0d4be92b | 61.1 | 82.4 | 56.5 | 49.9 | 62.5 |
| TheBloke/orca_mini_v3_7B-GPTQ | 06ddd48cd904907e3c73d2dfe47d28626053598b | 30.1 | 26 | 24.3 | 48.4 | 32.2 |
| TheBloke/OpenOrca-Platypus2-13B-GPTQ | 0fa9a56066656fbc94e3ec088bc900fd1d4d38e8 | 62.5 | 82.7 | 58.6 | 51.9 | 63.9 |
| TheBloke/guanaco-33B-GPTQ | 8e42e031bfc8be3bbf31dc546d7c51fb991ff6e0 | 28.2 | 26.3 | 24.9 | 49 | 32.1 |
| TheBloke/robin-33B-v2-GPTQ | 4c2588d65302e9ca634548ed81e8650fb2975686 | 27.7 | 26.3 | 23.5 | 49.5 | 31.8 |
| TheBloke/WizardLM-33B-V1.0-Uncensored-GPTQ | 1c65902c620fcdf6b9c8e36ce17f21360e186a1e | 27.4 | 26 | 25.8 | 48.9 | 32 |
| TheBloke/wizard-vicuna-13B-HF | 12dc8aacb474522ae2a83c18cb0fdf0907987f8f | 54.7 | 79.2 | 48.9 | 49.6 | 58.1 |
| TheBloke/vicuna-13B-1.1-HF | 8c71dbe9221e83d2ec72e4dc08beccfc78b563c0 | 52.7 | 80.1 | 51.9 | 52.1 | 59.2 |
| TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-GPTQ | 085eb5cd394f30d72bf5efcf83a580e87264b3e8 | 57 | 80.3 | 47.1 | 53.5 | 59.5 |
| TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16 | a55ce761bace8be6d17c357c57ef927751afd40c | 26.8 | 29.6 | 24.1 | 47.7 | 32 |
| TheBloke/gpt4-alpaca-lora-30b-HF | 3c8007467a081dc72ae09b9d358416b056b38920 | 64.8 | 85.7 | 58.5 | 52.2 | 65.3 |
| TheBloke/tulu-30B-fp16 | 37c3655676c37662f60c68dacfce3f0e861be846 | 60 | 83.4 | 56.1 | 45.1 | 61.2 |
| TheBloke/airoboros-33B-gpt4-1-4-SuperHOT-8K-fp16 | 53fdac1cdb8a37647e5dbe4199bc3fb70e617fce | 26 | 30.7 | 23.6 | 47.9 | 32.1 |
| TheBloke/EverythingLM-13B-16K-GPTQ | f14d3df05577f3e1ac35e2c4ec32ce0d39b97508 | 29.3 | 26.2 | 25.4 | 48.6 | 32.4 |
| TheBloke/Llama-2-7B-GPTQ | ecd7ab9f6adc36ecbe0d751eeea0d90ae1863c3b | 52 | 77.6 | 44 | 39.3 | 53.2 |
| TheBloke/Kimiko-13B-fp16 | 27868769e2d6b1af46337f0997c71b0577952a3d | 59.2 | 82.4 | 55.8 | 39.6 | 59.3 |
| TheBloke/CodeLlama-34B-Instruct-fp16 | a4d0ce949de4d5b5f74691641efb5b70736a32a8 | 40.8 | 35.7 | 39.7 | 44.3 | 40.1 |
| TheBloke/WizardLM-70B-V1.0-GPTQ | c234d7c9c0fd26efb55757fdbfb604d549539fe0 | 63.8 | 83.8 | 63.7 | 54.5 | 66.4 |
| TheBloke/CodeLlama-13B-Python-fp16 | 442282f4207442b828953a72c51a919c332cba5c | 33.2 | 44.5 | 25.9 | 44 | 36.9 |
| TheBloke/Manticore-13B-Chat-Pyg-Guanaco-SuperHOT-8K-GPTQ | bd3c66e626c81de4977f197e1534bd3dfa2f569d | 52.8 | 79.6 | 39.8 | 52.5 | 56.2 |
| TheBloke/medalpaca-13B-GPTQ-4bit | 12190f743a19e91dfe1f5c77abc0c1bf486073dd | 29.4 | 26.3 | 25.4 | 49.5 | 32.6 |
| TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ | ec9eb4f471b5bb6a7e5e505369628586c0c72252 | 61.3 | 82.1 | 57.8 | 50.2 | 62.8 |
| TheBloke/llama-2-70b-Guanaco-QLoRA-fp16 | 54b0e39d5e9aee7b323f50b0a26db15295c3d5c9 | 68.3 | 88.3 | 70.2 | 55.7 | 70.6 |
| TheBloke/tulu-13B-fp16 | 532aeb363b0ceee155b3cf9479ef635b797cee7c | 53.9 | 80.7 | 53.2 | 43.8 | 57.9 |
| TheBloke/Lemur-70B-Chat-v1-GPTQ | 12499165df1785f50df3e95940406032776401ea | 65.3 | 84.4 | 64.7 | 57.1 | 67.9 |
| TheBloke/wizardLM-7B-HF | a8e22531a48cece989e670f539eb18ebd2dbd0cf | 50.3 | 75.3 | 38.1 | 45.6 | 52.3 |
| TheBloke/Llama-2-13B-GPTQ | b7db471d1789802a3a8e3b93cdd66a9f046f17c3 | 57.3 | 81.6 | 54.8 | 36.6 | 57.6 |
| TheBloke/dromedary-65b-lora-HF | 3fa4546259d6bbd6b5d637484c325ab19181a73c | 61.6 | 82.5 | 63.1 | 38.8 | 61.5 |
| TheBloke/landmark-attention-llama7b-fp16 | bf8bdcb0c30cceb0ceda33cf5fde683807e39a58 | 47.4 | 65.8 | 31.6 | 42.6 | 46.8 |
| TheBloke/stable-vicuna-13B-HF | 2b099b2be0dafb2606ae9808c0f6183fe4bff7bc | 53.3 | 78.5 | 50.3 | 48.4 | 57.6 |
| TheBloke/WizardLM-13B-V1.1-GPTQ | 9df807ac64034bc6e7387326689d6e39656ce5e0 | 58.5 | 80.7 | 49.6 | 54.4 | 60.8 |
| TheBloke/robin-13B-v2-fp16 | f4dd8fc4440ed84fcf3ff1122f2b7f6024cca29d | 56.5 | 80.4 | 48.8 | 50.6 | 59.1 |
| TheBloke/VicUnlocked-alpaca-65B-QLoRA-fp16 | 6cdacfda96970aa144e316b108ab9bc17c99a573 | 65.6 | 85.2 | 63.1 | 52.5 | 66.6 |
| TheBloke/koala-7B-HF | d102fe3b68f1a5a50d547e4fd1c8b33b783c993b | 47.1 | 73.6 | 25.5 | 46 | 48 |
| TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16 | b407c1ece029ad5693d38e6e0931e9482962ed15 | 55.3 | 81.9 | 48.2 | 51.2 | 59.1 |
| TheBloke/tulu-7B-fp16 | 8a026683f79119643f4007da4e9155c7849792cc | 50.2 | 77 | 47.6 | 41.6 | 54.1 |
| TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16 | 0b6484697d5cca5baa534b882dcad8101add8cda | 25.4 | 34.6 | 23.6 | 46.9 | 32.6 |
| TheBloke/wizardLM-13B-1.0-fp16 | b79733805e98e668ff9a459975c259881b1b8014 | 57.3 | 80.9 | 52.9 | 50.5 | 60.4 |
| TheBloke/Genz-70b-GPTQ | 7d38987a43d2445b193db99a029a264b39dc6c8e | 71.1 | 87.6 | 70.3 | 62.3 | 72.8 |
| TheBloke/Project-Baize-v2-7B-GPTQ | 5dc039834e1ea42ac334458b2e3090fe3705cc59 | 46 | 73.4 | 35.5 | 39.9 | 48.7 |
| TheBloke/airoboros-13B-HF | 9219b61a0e8bc880e4cd0f8bebc48a97ee0950c7 | 58.3 | 81 | 50 | 51.6 | 60.2 |
| TheBloke/Platypus2-70B-Instruct-GPTQ | 4a44568aadd8a4babfa5549cf33e6e84cbae7ab8 | 71.2 | 87.6 | 69.9 | 62.5 | 72.8 |
| TheBloke/guanaco-7B-HF | 293c24105fa15afa127a2ec3905fdc2a0a3a6dac | 53 | 80.1 | 35.3 | 39.2 | 51.9 |
| TheBloke/koala-13B-HF | b20f96a0171ce4c0fa27d6048215ebe710521587 | 53 | 77.6 | 45.3 | 50.2 | 56.5 |
| chaoyi-wu/MedLLaMA_13B | 893557ef32f98cd01deb1c5d063be6d640ffa657 | 54.3 | 78.5 | 46.4 | 40.5 | 54.9 |
| Voicelab/trurl-2-13b | c8b2bbc7a570a9ea67928674695a4e7dff017d66 | 60.1 | 80.2 | 78.6 | 45.9 | 66.2 |
| Voicelab/trurl-2-7b | e26ca5f157c60fc527170cc04db7fc0ea04ad26f | 53.4 | 75.3 | 50 | 45.4 | 56 |
| THUDM/chatglm2-6b | 162b620e3078b03eefff94eb5f762d4093425fb5 | 38.8 | 59 | 46.7 | 48.1 | 48.2 |
| Brillibits/Instruct_Llama70B_Dolly15k | 45444ac60488594e0700e6c7313ff444b4468240 | 68.3 | 87.2 | 69.5 | 46.5 | 67.9 |
| joehuangx/spatial-vicuna-7b-v1.5-LoRA | dc71924cfb214b91461d35178e6ea6fef7946f13 | 50.8 | 74.6 | 48.1 | 49.4 | 55.7 |
| openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf | cdffb3488c5cb1a9aa5039a6b3bc72af24827db0 | 50.9 | 74.9 | 40 | 47.2 | 53.2 |
| breadlicker45/dough-instruct-base-001 | 3e1b0bf0a887feeb342982eee4f6d8041772a7dd | 23.9 | 24.8 | 23.1 | 53.4 | 31.3 |
| Tincando/fiction_story_generator | 377b080cf96e10d50289aa3e1fd79c330265f45a | 23.3 | 28.7 | 26.7 | 43.8 | 30.6 |
| GOAT-AI/GOAT-7B-Community | a7073a0f5142ce04aaa1603b0812b358f62a8de8 | 48.8 | 74.6 | 49.6 | 42.5 | 53.9 |
| l3utterfly/open-llama-3b-v2-layla | 465669ddafad25393ac3cfe94d3726cced112b30 | 38.2 | 66.4 | 28.6 | 44.4 | 44.4 |
| SaylorTwift/gpt2_test | ef61310a16ffda93bf8f6132e02658482ffc2bcc | 21.8 | 31.6 | 25.9 | 40.7 | 30 |
| Lazycuber/L2-7b-Guanaco-Uncensored | 9d49378c69c00113cf7f6e66d1ddb9d9b003dddc | 50.6 | 77 | 48.9 | 43.4 | 55 |
| Lazycuber/pyg-instruct-wizardlm | f00ef7a7b0cc6f02af2a11ac764270dfd61b9e2f | 41 | 66.7 | 26.3 | 31.9 | 41.5 |
| Lazycuber/Janemalion-6B | e72ae3ec110121115b1ae6c2e5fb3995997a2d96 | 42.4 | 68.4 | 28.3 | 34.6 | 43.4 |
| MrNJK/gpt2-xl-sft | 53250831436460254b7ee9afc4014d4d3156b372 | 30 | 49.2 | 25.6 | 38.8 | 35.9 |
| malhajar/Platypus2-70B-instruct-4bit-gptq | 2aa2f5646e496b3cd9b510681ba2c5081bde821f | 29 | 26 | 23.5 | 49.6 | 32 |
| pillowtalks-ai/delta13b | 83fa0860990df1db35550f973ba4306449e35412 | 52.7 | 80.1 | 51.9 | 52.1 | 59.2 |
| ethzanalytics/pythia-31m | 8a3c2f1555de8a3c53d67d73b5d0d53a66a6c6c2 | 20 | 26.3 | 24.3 | 50.1 | 30.2 |
| Rachneet/gpt2-xl-alpaca | a1a19acc0ef161bfa35f460c15ed3015595714d8 | 26.8 | 43.8 | 26.3 | 39.4 | 34.1 |
| Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k | 47c14f699cbbc9bd24458edd86eb70d87552b623 | 24.7 | 31.2 | 23.1 | 47.4 | 31.6 |
| Panchovix/WizardLM-33B-V1.0-Uncensored-SuperHOT-8k | b6d0002b10d43ab48aa14e365d9e7b40655ec160 | 25.4 | 32 | 23.4 | 47 | 32 |
| HyperbeeAI/Tulpar-7b-v1 | 719d8e1eb4a820f01e0a92ef6220d041964bb472 | 57 | 79.7 | 51.3 | 51.8 | 60 |
| quantumaikr/QuantumLM | 9058130b416355b37f5f78777748aa56d98a4da0 | 55.8 | 79.7 | 54.2 | 46.7 | 59.1 |
| quantumaikr/KoreanLM-hf | a7261e7ae6ee76c78e1ba1ac8c59bcc3e0868bf9 | 51.5 | 76.8 | 40.6 | 44.3 | 53.3 |
| quantumaikr/quantumairk-llama-2-70B-instruct | 94ff2fcafd507b08e953f70806ec671ec3d17b15 | 70.3 | 87.1 | 70.5 | 54.4 | 70.6 |
| quantumaikr/llama-2-7b-hf-guanaco-1k | bdb57c5c992872ced47f48cb2177a5fa159f926a | 51.6 | 76.7 | 47.4 | 44.8 | 55.1 |
| quantumaikr/QuantumLM-70B-hf | e13dd23ae5e611e959b6c8d5bc47bf4fd37cd9d7 | 59.5 | 83 | 62.3 | 53.4 | 64.6 |
| quantumaikr/llama-2-70b-fb16-guanaco-1k | c317af1b593a4f91b0e79c7142ca75f1e8d65278 | 70.5 | 87.3 | 70.2 | 57.6 | 71.4 |
| quantumaikr/llama-2-70b-fb16-korean | fd57855006c15c4121feccab1cbeee8107de5b5a | 67.2 | 86.8 | 69.3 | 56.5 | 70 |
| quantumaikr/open_llama_7b_hf | 41441cea58f963cfc4827da12ae5759e943151cb | 26.5 | 26.9 | 26.5 | 49.5 | 32.4 |
| quantumaikr/llama-2-70B-chat | d242fdbf800e388e6ee456578064cab5e057f987 | 67.6 | 86.9 | 69.2 | 57.3 | 70.2 |
| quantumaikr/llama-2-70b-fb16-orca-chat-10k | 697aaeb8eb9905c9b25bebb736d1905444c774a6 | 68.1 | 87.1 | 69.2 | 61.6 | 71.5 |
| quantumaikr/QuantumLM-llama2-70B-Korean-LoRA | ea21456e999f6ce35da1cd88b8f62bb5770b985a | 70.6 | 86.4 | 69.4 | 56.1 | 70.6 |
| quantumaikr/QuantumLM-7B | f44998432fb90d88094ddf42e57ec458877a197f | 50.3 | 76.1 | 45.3 | 46.3 | 54.5 |
| huggingtweets/jerma985 | 816206ad02a397161be78dcb70eeda67e0c53132 | 21.7 | 30.9 | 26.6 | 44 | 30.8 |
| huggingtweets/gladosystem | 02a1bbcee7b584ace743b2fe4885cc0eaf2179ac | 24.4 | 29.7 | 23.2 | 41.8 | 29.8 |
| oh-yeontaek/llama-2-7B-LoRA-assemble | 72e866a96a2e9afc6527c8d757c69088c3a069c8 | 57.3 | 78.8 | 50.8 | 53.2 | 60 |
| oh-yeontaek/llama-2-70B-LoRA-assemble-v2 | 7feeb5b665ab1ecdfd9cc4fe45fadb86b7b91b5b | 71.8 | 86.9 | 69.4 | 64.8 | 73.2 |
| oh-yeontaek/llama-2-70B-LoRA-assemble | 91caffe08852dcbbdedd64786bd3b4ac0dcb2e96 | 71.8 | 86.8 | 69.4 | 64.8 | 73.2 |
| oh-yeontaek/llama-2-13B-LoRA-assemble | 85bb49d333dba4a08b051418663d16853ce30cee | 63.6 | 83.5 | 59.8 | 56 | 65.7 |
| teknium/OpenHermes-7B | 74edb1ad58d3d517ef46c4e2a31081084ecbc473 | 56.1 | 78.3 | 48.6 | 45 | 57 |
| teknium/OpenHermes-13B | f09d0fe655ad57cce9179b7b40ea6f81e07db18c | 60.2 | 82.2 | 56.2 | 46 | 61.2 |
| psmathur/model_007 | 0f5d81b13718a866cb078bd8762ab80a41972663 | 71.1 | 87.7 | 69 | 63.1 | 72.7 |
| psmathur/orca_mini_7b | 6ed0dca683685cb5b9e7df599f87d311f00ba6db | 43.9 | 65.2 | 30 | 42 | 45.3 |
| psmathur/orca_mini_v2_7b | 165850882991d7fa4eabab577a03ed84e0713bfa | 50.8 | 76 | 39.5 | 43.9 | 52.6 |
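The score column is consistent with the plain average of the four benchmark results: for TheBloke/Kimiko-v2-13B-fp16, (61 + 83.3 + 55.2 + 40.7) / 4 = 60.05, shown as 60. A quick consistency check, reusing the df from the loading sketch above (same placeholder assumptions; the displayed benchmark values are themselves rounded, so a small tolerance is needed):

```python
# Recompute each row's score as the mean of the four benchmark columns
# produced by pd.json_normalize, then flag rows that deviate by more
# than the rounding of the displayed values can explain.
benchmarks = [
    "results.arc:challenge",
    "results.hellaswag",
    "results.hendrycksTest",
    "results.truthfulqa:mc",
]
expected = df[benchmarks].mean(axis=1).round(1)
mismatches = df[(expected - df["score"]).abs() > 0.1]
print(f"{len(mismatches)} of {len(df)} rows deviate from the 4-benchmark average")
```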