model (string) | revision (string) | model_sha (string) | results (dict) | commit (string) | date (timestamp[ns]) | score (float64) |
---|---|---|---|---|---|---|
psmathur/orca_mini_v3_13b | main | 99904e4119575f2c1606ca1e31d288f38a9f20b5 | {"arc:challenge": 63.1, "hellaswag": 82.4, "hendrycksTest": 56.5, "truthfulqa:mc": 51.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.4 |
psmathur/model_420 | main | 13c7b5f403c0f2af9bf7fce2d4a32deb9054c083 | {"arc:challenge": 70.1, "hellaswag": 87.7, "hendrycksTest": 70.3, "truthfulqa:mc": 54} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 70.5 |
psmathur/model_420_preview | main | 5095384f1b7bb6e23a987f95589e66e21ae854ef | {"arc:challenge": 67.1, "hellaswag": 87.3, "hendrycksTest": 69.9, "truthfulqa:mc": 44.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 67.2 |
psmathur/orca_mini_13b | main | ca900c8f3145de40cd188c559b2901a2e4711546 | {"arc:challenge": 42.1, "hellaswag": 63.4, "hendrycksTest": 35.4, "truthfulqa:mc": 43.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46 |
psmathur/model_51 | main | 9542702011bf4d282f4b0f0bd79229f5822b6313 | {"arc:challenge": 68.4, "hellaswag": 86.7, "hendrycksTest": 69.3, "truthfulqa:mc": 57.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 70.4 |
psmathur/model_007_13b | main | 0436ba68d245c8a2c04a2cc9637630d2e163cbbe | {"arc:challenge": 22.7, "hellaswag": 25, "hendrycksTest": 23.1, "truthfulqa:mc": null} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | null |
psmathur/model_007_v2 | main | 3d95e0f3598f7a76ab97cb2cc0e4aae957d77479 | {"arc:challenge": 71.4, "hellaswag": 87.3, "hendrycksTest": 68.6, "truthfulqa:mc": 62.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.5 |
psmathur/orca_mini_3b | main | fd2754e80ce80757a3a68a840d7d287dd7def676 | {"arc:challenge": 41.6, "hellaswag": 61.5, "hendrycksTest": 26.8, "truthfulqa:mc": 42.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43.1 |
psmathur/test_42_70b | main | ca3789cd6b683e97dcd6a5f0367f90a63d7a4e7b | {"arc:challenge": 68.3, "hellaswag": 87.7, "hendrycksTest": 70, "truthfulqa:mc": 48.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 68.7 |
psmathur/model_009 | main | 5020869e6394b1ac039bf80a0a1d2bed6be6707e | {"arc:challenge": 71.6, "hellaswag": 87.7, "hendrycksTest": 69.4, "truthfulqa:mc": 60.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.4 |
psmathur/orca_mini_v3_70b | main | c1d4f997f8ed685a6efc72229523b2e56fd0774b | {"arc:challenge": 71.2, "hellaswag": 87.9, "hendrycksTest": 70.2, "truthfulqa:mc": 61.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.6 |
psmathur/model_007_13b_v2 | main | 1c959d4b5d5b8683b051f07475bb5c1ab24c8bb0 | {"arc:challenge": 61.9, "hellaswag": 82.5, "hendrycksTest": 57.3, "truthfulqa:mc": 53.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.8 |
psmathur/model_101 | main | 884c53a64a3c5faf7b0706d36a587ca1532ed8f5 | {"arc:challenge": 68.7, "hellaswag": 86.4, "hendrycksTest": 69.9, "truthfulqa:mc": 58.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 71 |
psmathur/model_42_70b | main | ca3789cd6b683e97dcd6a5f0367f90a63d7a4e7b | {"arc:challenge": 68.3, "hellaswag": 87.7, "hendrycksTest": 70, "truthfulqa:mc": 48.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 68.7 |
psmathur/orca_mini_v2_13b | main | 1058709314f7ca090937d0a2b7b37b0b3a8f12a3 | {"arc:challenge": 55.1, "hellaswag": 79.7, "hendrycksTest": 50.1, "truthfulqa:mc": 52.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.4 |
psmathur/orca_mini_v3_7b | main | a1583d2f02041fb37df28eeae4da644d8dff33eb | {"arc:challenge": 56.9, "hellaswag": 79.6, "hendrycksTest": 52.4, "truthfulqa:mc": 50.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.8 |
nathan0/mpt_delta_tuned_model_v3 | main | 6adb4cb4ba37f4ce9e9c3051d343addf1098182c | {"arc:challenge": 50.7, "hellaswag": 76.4, "hendrycksTest": 28.7, "truthfulqa:mc": 35.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 47.8 |
nathan0/mpt_delta_tuned_model_v2 | main | 6adb4cb4ba37f4ce9e9c3051d343addf1098182c | {"arc:challenge": 50.7, "hellaswag": 76.4, "hendrycksTest": 28.7, "truthfulqa:mc": 35.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 47.8 |
frank098/Wizard-Vicuna-13B-juniper | main | 24f58beb9ed4cf635fc962853ed71d0f4b1909ba | {"arc:challenge": 55.9, "hellaswag": 79.7, "hendrycksTest": 45, "truthfulqa:mc": 54.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.8 |
frank098/WizardLM_13B_juniper | main | 2204970fc0d96b071e2b1b003fbc5c87cfc46840 | {"arc:challenge": 55.4, "hellaswag": 77.2, "hendrycksTest": 45.5, "truthfulqa:mc": 51.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.4 |
gpt2-large | main | 97935fc1a406f447320c3db70fe9e9875dca2595 | {"arc:challenge": 25.9, "hellaswag": 45.6, "hendrycksTest": 26.1, "truthfulqa:mc": 38.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 34.1 |
Harshvir/LaMini-Neo-1.3B-Mental-Health_lora | main | 9f1c45d5ce88a8eaf7ec03b760a4adfb5fda07eb | {"arc:challenge": 25.8, "hellaswag": 25.7, "hendrycksTest": 27, "truthfulqa:mc": 48.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.7 |
Harshvir/Llama-2-7B-physics | main | 5e66b59c145586266b2351a63f0cf1b4f62f5454 | {"arc:challenge": 52.9, "hellaswag": 77.7, "hendrycksTest": 48.8, "truthfulqa:mc": 48.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.1 |
AGI-inc/lora_moe_7b_baseline | main | ad8065c8357945e6c07569033f5eba82c67c72ed | {"arc:challenge": 50.9, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.7 |
AGI-inc/lora_moe_7b | main | 3a528bdd73a12adc73f841a6d46bd363fe690023 | {"arc:challenge": 50.9, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.7 |
BreadAi/gpt-Youtube | main | de88554a0212c16fdfeda030afb58f831ebcd895 | {"arc:challenge": 23.3, "hellaswag": 26.3, "hendrycksTest": 23.5, "truthfulqa:mc": 48.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.4 |
BreadAi/gpt-YA-1-1_70M | main | 218e8da522cf6fb5566314f37624f27412ae2259 | {"arc:challenge": 22.5, "hellaswag": 27.4, "hendrycksTest": 25.4, "truthfulqa:mc": 47.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.6 |
BreadAi/StoryPy | main | 5c32081bd3bc1404c2f5b8dbb6f888048bcb7cd7 | {"arc:challenge": 22.4, "hellaswag": 26.2, "hendrycksTest": 24.4, "truthfulqa:mc": 49.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.5 |
BreadAi/MuseCan | main | f441866d78feaead3dede6efd9e23990bb74c21e | {"arc:challenge": 28.1, "hellaswag": 25, "hendrycksTest": 24.2, "truthfulqa:mc": null} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | null |
BreadAi/PM_modelV2 | main | 4edde209eea33af491206f8651c0c47e70e08289 | {"arc:challenge": 25.1, "hellaswag": 26.4, "hendrycksTest": 26.1, "truthfulqa:mc": 51.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.2 |
BreadAi/MusePy-1-2 | main | 6c1725158a74a41a10f21696a48510d45b4b425b | {"arc:challenge": 25.8, "hellaswag": 25.9, "hendrycksTest": 25.2, "truthfulqa:mc": 49.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.6 |
ausboss/llama-30b-supercot | main | dc9d81f454d286ea040c5cd45b058aecaa51c13e | {"arc:challenge": 64.8, "hellaswag": 85.1, "hendrycksTest": 56.6, "truthfulqa:mc": 54} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.1 |
ausboss/llama-13b-supercot | main | f6953fa162b487a3d4c6bdc7b7951e09576c2ae5 | {"arc:challenge": 56.1, "hellaswag": 81.7, "hendrycksTest": 45.4, "truthfulqa:mc": 48.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.9 |
ausboss/llama7b-wizardlm-unfiltered | main | 2123beec77083c414b2ae51dd25b7a870b0b936c | {"arc:challenge": 53, "hellaswag": 77.9, "hendrycksTest": 36.4, "truthfulqa:mc": 37.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 51.3 |
Neko-Institute-of-Science/pygmalion-7b | main | 6473f9996d758fde48a181f37cc5de575aff1606 | {"arc:challenge": 51.4, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.8 |
FelixChao/llama2-13b-math1.2 | main | b05b4c22893e950e8e33acb67087a9acc8f0ab97 | {"arc:challenge": 56.9, "hellaswag": 80.7, "hendrycksTest": 53.2, "truthfulqa:mc": 48.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.8 |
FelixChao/vicuna-7B-chemical | main | fbf6476ebfa856ffe743e41f8d4413c15b2127c9 | {"arc:challenge": 49.8, "hellaswag": 74.4, "hendrycksTest": 44.1, "truthfulqa:mc": 51.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55 |
FelixChao/vicuna-33b-coder | main | 67f6e669d7a15c1104a1478057f3752a503e83c0 | {"arc:challenge": 60.7, "hellaswag": 83.3, "hendrycksTest": 56.9, "truthfulqa:mc": 51.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.2 |
FelixChao/llama2-13b-math1.1 | main | 3c4d83d3525e54a493ff510443fdcca44bf63b59 | {"arc:challenge": 57.3, "hellaswag": 80.7, "hendrycksTest": 53.6, "truthfulqa:mc": 48.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60 |
FelixChao/CodeLlama13B-Finetune-v1 | main | 40ff78ce37efcaf83718534c494829a573b9d719 | {"arc:challenge": 45.8, "hellaswag": 69.4, "hendrycksTest": 45, "truthfulqa:mc": 45} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 51.3 |
FelixChao/vicuna-7B-physics | main | 2147983e9493347c3424c07403f65e7a81c0b19f | {"arc:challenge": 49.5, "hellaswag": 75.9, "hendrycksTest": 46.6, "truthfulqa:mc": 49.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.3 |
harborwater/open-llama-3b-v2-wizard-evol-instuct-v2-196k | main | 4da0c661e6df1235c9997b996c8e395b87248406 | {"arc:challenge": 41.2, "hellaswag": 72.9, "hendrycksTest": 25.4, "truthfulqa:mc": 38.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.6 |
abhiramtirumala/DialoGPT-sarcastic-medium | main | 292596e120591887383011c4520bc5b57e7e8993 | {"arc:challenge": 23.3, "hellaswag": 25.9, "hendrycksTest": 23.8, "truthfulqa:mc": 46} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 29.8 |
mncai/SGPT-1.3B-insurance-epoch10 | main | df685c0bbf838f0627383c28f48e577ee901ba68 | {"arc:challenge": 24.6, "hellaswag": 24.2, "hendrycksTest": 25.2, "truthfulqa:mc": 45.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 29.8 |
huggingface/llama-13b | main | 4022c52fcc7473ce7364bb5ac166195903ea1efb | {"arc:challenge": 56.2, "hellaswag": 80.9, "hendrycksTest": 47.7, "truthfulqa:mc": 39.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.1 |
huggingface/llama-30b | main | 13c77caa472bfa79d4f3f0ec82cbdc9dd88e5d22 | {"arc:challenge": 61.3, "hellaswag": 84.7, "hendrycksTest": 58.5, "truthfulqa:mc": 42.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.7 |
huggingface/llama-7b | main | f356572651e58fb337d610470d4b36976e7fb802 | {"arc:challenge": 51, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.7 |
huggingface/llama-65b | main | 4ae2e56610e8b9b9a78472708390668e9096b4f9 | {"arc:challenge": 63.5, "hellaswag": 86.1, "hendrycksTest": 63.9, "truthfulqa:mc": 43.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.2 |
garage-bAInd/Platypus-30B | main | c5d21054f8dd71099696bd7790df07ac54990f29 | {"arc:challenge": 64.6, "hellaswag": 84.3, "hendrycksTest": 64.2, "truthfulqa:mc": 45.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.6 |
garage-bAInd/Platypus2-70B | main | 16b6583ad58313331f86be18e531ab03f1857695 | {"arc:challenge": 70.6, "hellaswag": 87.2, "hendrycksTest": 70.1, "truthfulqa:mc": 52.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 70.1 |
garage-bAInd/Platypus2-7B | main | f784afa7887b0738d92ea470797582756f02e630 | {"arc:challenge": 55.2, "hellaswag": 78.8, "hendrycksTest": 49.8, "truthfulqa:mc": 40.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.1 |
garage-bAInd/Camel-Platypus2-13B | main | 0480a52799cb8e8de73bb41994df8b6b793937c7 | {"arc:challenge": 60.8, "hellaswag": 83.6, "hendrycksTest": 56.5, "truthfulqa:mc": 49.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.6 |
garage-bAInd/Camel-Platypus2-70B | main | b9f8de09ab860ee8ba570db7227c5444020ea056 | {"arc:challenge": 71.1, "hellaswag": 87.6, "hendrycksTest": 70, "truthfulqa:mc": 58.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 71.7 |
garage-bAInd/Platypus2-70B-instruct | main | a66378c15f89756215ccc64572ba69b161173703 | {"arc:challenge": 71.8, "hellaswag": 87.9, "hendrycksTest": 70.5, "truthfulqa:mc": 62.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 73.1 |
pythainlp/wangchanglm-7.5B-sft-en-sharded | main | dd22eaea8be3fcb8c28f61b513a89d1adac00ffd | {"arc:challenge": 34.5, "hellaswag": 59.8, "hendrycksTest": 26.4, "truthfulqa:mc": 34.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 38.7 |
pythainlp/wangchanglm-7.5B-sft-enth | main | eeee33ea6778a5e66184eeb4bf4294d4316b1933 | {"arc:challenge": 33.8, "hellaswag": 59, "hendrycksTest": 24.5, "truthfulqa:mc": 34.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 38 |
shibing624/chinese-llama-plus-13b-hf | main | f17a52b8067d551a814069d2c710e1f5c487a3ce | {"arc:challenge": 46.2, "hellaswag": 71.9, "hendrycksTest": 40.7, "truthfulqa:mc": 39.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.7 |
shibing624/chinese-alpaca-plus-13b-hf | main | a118d2c35573b9a70c6f5b56fba4b657f74ce00c | {"arc:challenge": 53.2, "hellaswag": 73.5, "hendrycksTest": 48.8, "truthfulqa:mc": 45.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.2 |
shibing624/chinese-alpaca-plus-7b-hf | main | 0deb5a13732f1e3e3240ea83f403c57283fe2dc8 | {"arc:challenge": 49.2, "hellaswag": 70.5, "hendrycksTest": 38.4, "truthfulqa:mc": 39.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.4 |
Henk717/airochronos-33B | main | 06843c6693cc265dabb464c818a3d3713239721a | {"arc:challenge": 64.4, "hellaswag": 85.2, "hendrycksTest": 59.8, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65 |
Henk717/chronoboros-33B | main | a4deca117c5fa48f2cdc49ed2e2596046201d688 | {"arc:challenge": 63.9, "hellaswag": 85, "hendrycksTest": 59.4, "truthfulqa:mc": 49.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.5 |
dfurman/llama-2-70b-dolphin-peft | main | a1190dee60b5854e80d340958dc3cc956bc56f68 | {"arc:challenge": 69.6, "hellaswag": 86.8, "hendrycksTest": 69.2, "truthfulqa:mc": 57.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 70.7 |
dfurman/llama-2-13b-guanaco-peft | main | 10b58a7c31d5513fa56a9b8b38739253bf5cc0b4 | {"arc:challenge": 60, "hellaswag": 82.4, "hendrycksTest": 55.8, "truthfulqa:mc": 42.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.2 |
dfurman/falcon-40b-openassistant-peft | main | 3d5084b6fbcb9f9f36493d9fd1e3795b0b9860f0 | {"arc:challenge": 62.6, "hellaswag": 85.6, "hendrycksTest": 57.8, "truthfulqa:mc": 51} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.2 |
TehVenom/Pygmalion-13b-Merged | main | f96308083033c84db47b6c093da3817c085c87c7 | {"arc:challenge": 56.5, "hellaswag": 80, "hendrycksTest": 42.9, "truthfulqa:mc": 35.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 53.8 |
TehVenom/Pygmalion_AlpacaLora-7b | main | 1f61442e1238062095b31b4909c5e9ab26105794 | {"arc:challenge": 53.2, "hellaswag": 76.9, "hendrycksTest": 35.9, "truthfulqa:mc": 39.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 51.4 |
TehVenom/DiffMerge_Pygmalion_Main-onto-V8P4 | main | f855780745aa34c3bdbe020e4c51253d538cb21e | {"arc:challenge": 40.5, "hellaswag": 67.5, "hendrycksTest": 25.7, "truthfulqa:mc": 32.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.6 |
TehVenom/Dolly_Shygmalion-6b-Dev_V8P2 | main | 6413b1d9e8b58df9d3aac91a862e8d505d8c6716 | {"arc:challenge": 41.4, "hellaswag": 67.7, "hendrycksTest": 28.5, "truthfulqa:mc": 36.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43.6 |
TehVenom/PPO_Pygway-V8p4_Dev-6b | main | f30709dba36c665869f9ac8cd0cef5a8a2e7c8df | {"arc:challenge": 40.4, "hellaswag": 67.1, "hendrycksTest": 29.3, "truthfulqa:mc": 35.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43 |
TehVenom/Pygmalion-Vicuna-1.1-7b | main | bdac596568769d1ba4af8df9a611eee9723adf29 | {"arc:challenge": 52.8, "hellaswag": 78.7, "hendrycksTest": 43.6, "truthfulqa:mc": 42.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.3 |
TehVenom/Moderator-Chan_GPT-JT-6b | main | f2b7cda25f6965c1551fa78e9e38676994bc6638 | {"arc:challenge": 43.7, "hellaswag": 70.8, "hendrycksTest": 35.6, "truthfulqa:mc": 36} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.5 |
TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit | main | 62f92ddab8b37eaeda15cf5ecb5605141a0525eb | {"arc:challenge": 61.5, "hellaswag": 83.5, "hendrycksTest": 57.4, "truthfulqa:mc": 50.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.3 |
TehVenom/Dolly_Malion-6b | main | f239eb8d24fe26db3b0a9a69115dc305fc9351af | {"arc:challenge": 42.8, "hellaswag": 68.4, "hendrycksTest": 27.1, "truthfulqa:mc": 33} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.8 |
TehVenom/PPO_Shygmalion-V8p4_Dev-6b | main | fa3d503bca50c947e7a5bbde4bdd82f699f65c02 | {"arc:challenge": 40.7, "hellaswag": 67, "hendrycksTest": 29.3, "truthfulqa:mc": 35.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43.2 |
TehVenom/Dolly_Shygmalion-6b | main | 108fabf8a916900525492c294c50998d7c09f10b | {"arc:challenge": 41.9, "hellaswag": 68.5, "hendrycksTest": 27.6, "truthfulqa:mc": 33.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43 |
TehVenom/PPO_Shygmalion-6b | main | 573e4546fdccc5c8a52b9d7cb23a2e10f0f2ef51 | {"arc:challenge": 40.3, "hellaswag": 66.9, "hendrycksTest": 27.5, "truthfulqa:mc": 34.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.2 |
TehVenom/GPT-J-Pyg_PPO-6B | main | cde5bab3ae16e1704c5fec54a6a7ff1169c935e6 | {"arc:challenge": 42.1, "hellaswag": 67.5, "hendrycksTest": 28.5, "truthfulqa:mc": 32} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.5 |
TehVenom/ChanMalion | main | 2667b0e0b705ed23f81f3e2b69673d722e8f4964 | {"arc:challenge": 41.9, "hellaswag": 68.3, "hendrycksTest": 27.3, "truthfulqa:mc": 33.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.8 |
TehVenom/Metharme-13b-Merged | main | 90c02cc338afcdd890a948af06432674743363ad | {"arc:challenge": 59.9, "hellaswag": 81.1, "hendrycksTest": 47.2, "truthfulqa:mc": 51.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.8 |
TehVenom/GPT-J-Pyg_PPO-6B-Dev-V8p4 | main | 930dc82245c607ce43558a0e6c0225e77b341ea6 | {"arc:challenge": 40.2, "hellaswag": 66.4, "hendrycksTest": 30.4, "truthfulqa:mc": 34.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43 |
TehVenom/DiffMerge-DollyGPT-Pygmalion | main | 6a00b371146d4bd2903890814485ee1b775162e7 | {"arc:challenge": 23.6, "hellaswag": 34.4, "hendrycksTest": 24.4, "truthfulqa:mc": 46.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.2 |
The-Face-Of-Goonery/Huginn-13b-FP16 | main | 69615d9a8e1547f2407afd3380868a99f780e008 | {"arc:challenge": 60.6, "hellaswag": 82.5, "hendrycksTest": 53.7, "truthfulqa:mc": 54.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.8 |
The-Face-Of-Goonery/Huginn-13b-v4.5 | main | f3be56d8bf71a8d3905974b1e5fcba7336b02159 | {"arc:challenge": 60.7, "hellaswag": 82.3, "hendrycksTest": 52.3, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.5 |
The-Face-Of-Goonery/Huginn-13b-V4 | main | 6186feee849e0c2b7e62d4cbdc4cdc48260ac684 | {"arc:challenge": 60.7, "hellaswag": 82.3, "hendrycksTest": 52.3, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.5 |
The-Face-Of-Goonery/huginnv1.2 | main | aed4ddc951c657993939fa5b87a4088550569a3b | {"arc:challenge": 62.4, "hellaswag": 84.3, "hendrycksTest": 57, "truthfulqa:mc": 47.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.9 |
The-Face-Of-Goonery/Huginn-22b-Prototype | main | 29222b05794abb862ad0aaaf3020696c9f599810 | {"arc:challenge": 57.7, "hellaswag": 80.7, "hendrycksTest": 49.8, "truthfulqa:mc": 52.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.1 |
The-Face-Of-Goonery/Huginn-v3-13b | main | 6c2faf828c5380d28c51fcb4d3d0f1a420fb9a9a | {"arc:challenge": 60.7, "hellaswag": 82.3, "hendrycksTest": 52.3, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.5 |
danielhanchen/open_llama_3b_600bt_preview | main | d8fddf7651dfcae5aefda59d9e868c9111d8bdb3 | {"arc:challenge": 36.9, "hellaswag": 60, "hendrycksTest": 26, "truthfulqa:mc": 32.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 38.9 |
oPenBuddy/openbuddy-llama2-34b-v11.1-bf16 | main | 21ac0d26c0097e5ac5b4a757493574b156da7731 | {"arc:challenge": 50, "hellaswag": 71.2, "hendrycksTest": 55.7, "truthfulqa:mc": 53} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.5 |
Deci/DeciCoder-1b | main | af2ef45ef8cbe82eb7eb4074f260412bc14c7b11 | {"arc:challenge": 21.2, "hellaswag": 31.1, "hendrycksTest": 24.3, "truthfulqa:mc": 47.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.9 |
microsoft/DialoGPT-medium | main | 9d5c5fadcc072b693fb5a5e29416bbf3f503c26c | {"arc:challenge": 24.5, "hellaswag": 26.2, "hendrycksTest": 25.8, "truthfulqa:mc": 47.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.9 |
microsoft/DialoGPT-small | main | 97d0fec744c2cb4d48f5db51d17e3258e185858e | {"arc:challenge": 25.8, "hellaswag": 25.8, "hendrycksTest": 25.8, "truthfulqa:mc": 47.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.2 |
microsoft/CodeGPT-small-py | main | e5f31df92bfb7b7a808ea8d1c7557488e1bdff7f | {"arc:challenge": 22.7, "hellaswag": 27.3, "hendrycksTest": 25, "truthfulqa:mc": 51.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.6 |
microsoft/DialoGPT-large | main | 04e3e47b52dadbcf7688aa61a7ed0438ecf9184c | {"arc:challenge": 23.4, "hellaswag": 25.8, "hendrycksTest": 23.8, "truthfulqa:mc": 50.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.8 |
microsoft/phi-1_5 | main | ea95720a352172db6fcbcd89032bfb1cb8481797 | {"arc:challenge": 52.9, "hellaswag": 63.8, "hendrycksTest": 43.9, "truthfulqa:mc": 40.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 50.4 |
HWERI/pythia-70m-deduped-cleansharegpt | main | 6ea42abd94cb0017918f6fe5e71d78bcb7c75548 | {"arc:challenge": 25.7, "hellaswag": 25.4, "hendrycksTest": 23.1, "truthfulqa:mc": 51.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.4 |
HWERI/Llama2-7b-sharegpt4 | main | 8ecaba5dd0e9929f5858cfe9f5f8cd8ba285c9e5 | {"arc:challenge": 55.7, "hellaswag": 80.9, "hendrycksTest": 47.5, "truthfulqa:mc": 48.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.1 |
togethercomputer/RedPajama-INCITE-Chat-7B-v0.1 | main | 47b94a739e2f3164b438501c8684acc5d5acc146 | {"arc:challenge": 42.1, "hellaswag": 70.8, "hendrycksTest": 26.9, "truthfulqa:mc": 36.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44 |
togethercomputer/RedPajama-INCITE-7B-Chat | main | 47b94a739e2f3164b438501c8684acc5d5acc146 | {"arc:challenge": 42.1, "hellaswag": 70.8, "hendrycksTest": 26.9, "truthfulqa:mc": 36.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44 |
togethercomputer/GPT-JT-6B-v0 | main | 41bd1937dbc51f9e589d310bddab5b4c1409e783 | {"arc:challenge": 42.1, "hellaswag": 68, "hendrycksTest": 49.3, "truthfulqa:mc": 38.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.6 |
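Across the rows above, the score column appears to be the plain average of the four benchmark results (arc:challenge, hellaswag, hendrycksTest, truthfulqa:mc), reported to one decimal, and it is null whenever one of the benchmarks is null. A minimal sketch of that check in Python; the record literal copies the first table row, and the helper name `average_score` is illustrative only, not part of the dataset:

```python
# Sketch (assumption): recompute a row's overall score as the mean of its
# four benchmark results; a null (None) benchmark leaves the score undefined,
# matching the rows above whose score column is null.
from typing import Optional


def average_score(results: dict) -> Optional[float]:
    """Mean of the benchmark results, or None if any benchmark is null."""
    values = list(results.values())
    if any(v is None for v in values):
        return None
    return sum(values) / len(values)


# First row of the table, copied verbatim.
row = {
    "model": "psmathur/orca_mini_v3_13b",
    "results": {
        "arc:challenge": 63.1,
        "hellaswag": 82.4,
        "hendrycksTest": 56.5,
        "truthfulqa:mc": 51.8,
    },
    "score": 63.4,
}

mean = average_score(row["results"])
print(round(mean, 2), row["score"])  # 63.45 vs. the reported 63.4
```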