Dataset schema:

| field | type | notes |
|---|---|---|
| model | string | lengths 4–89 |
| revision | string | 1 distinct value |
| model_sha | string | lengths 0–40 |
| results | dict | per-benchmark scores |
| commit | string | length 40 |
| date | timestamp[ns] | |
| score | float64 | range 21.8–83 |
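The rows that follow can also be handled programmatically; the sketch below shows one way to load a dataset with this schema through the Hugging Face `datasets` library. The repository id is a placeholder, since the source table does not name one.

```python
# Minimal sketch, assuming the rows are published as a Hugging Face dataset.
# "org/open-llm-results" is a placeholder repository id, not taken from this table.
from datasets import load_dataset

ds = load_dataset("org/open-llm-results", split="train")

# Each record carries: model, revision, model_sha, results (a dict of
# benchmark scores), commit, date, and the aggregate score.
for row in ds.select(range(3)):
    print(row["model"], row["score"], row["results"])
```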
Every row shares revision = main, commit = 9ba100d35ce48d3d4c132947464c93c861932caa, and date = 2023-11-23T17:28:23; the fields that vary per model are listed below, with the results dict expanded into its four benchmark columns.

| model | model_sha | arc:challenge | hellaswag | hendrycksTest | truthfulqa:mc | score |
|---|---|---|---|---|---|---|
| CalderaAI/30B-Epsilon | 6962638c2b0368ad496af6e20e46e3de97a7772b | 63.1 | 83.6 | 56.9 | 59 | 65.6 |
| Lajonbot/vicuna-7b-v1.5-PL-lora_unload | 92bf763ce7ae0bfe155bfd60190eed64582e5080 | 53.5 | 76.7 | 49.7 | 49.7 | 57.4 |
| Lajonbot/WizardLM-13B-V1.2-PL-lora_unload | 5f14e6f5ea67fd2840791c46b3e00846cbdb32cf | 58.5 | 81.1 | 55.1 | 46.2 | 60.2 |
| Lajonbot/vicuna-13b-v1.3-PL-lora_unload | 5582369752583b02df3cba4bd2a733d12265cddb | 54.9 | 80.4 | 52.2 | 49.6 | 59.3 |
| Xwin-LM/Xwin-LM-70B-V0.1 | d6c803a180e3d46c371f8d3cb3848b861596ccbc | 70.2 | 87.3 | 69.8 | 59.9 | 71.8 |
| jlevin/guanaco-unchained-llama-2-7b | 43f3de8bcef63eec03a1b00079c08b5932c1a429 | 49.7 | 74.3 | 45.2 | 43.1 | 53.1 |
| Harshvir/LaMini-Neo-1.3B-Mental-Health_lora | 9f1c45d5ce88a8eaf7ec03b760a4adfb5fda07eb | 25.8 | 25.7 | 27 | 48.2 | 31.7 |
| Harshvir/Llama-2-7B-physics | 5e66b59c145586266b2351a63f0cf1b4f62f5454 | 52.9 | 77.7 | 48.8 | 48.9 | 57.1 |
| PocketDoc/Dans-PersonalityEngine-30b | 1990b46a2e2ac1f6282d961bce691ceceafed514 | 63.5 | 84.4 | 59 | 47 | 63.5 |
| PocketDoc/Dans-AdventurousWinds-7b | ddc7e4fcbbb5c666a3fe1bbe4a47b4477151b699 | 61 | 83.5 | 63.7 | 42.7 | 62.7 |
| PocketDoc/Dans-AdventurousWinds-Mk2-7b | cfcc969a7e97275b2298253f1eabf4575e5a3768 | 58.2 | 83.5 | 61.8 | 43.6 | 61.8 |
| PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged | a7e5484df8aceae7800ae9301a3954cf74b527e9 | 58.8 | 81.8 | 48.1 | 41.2 | 57.5 |
| PocketDoc/Dans-CreepingSenseOfDoom | efc7cbc5d0461c137e8ea0c83e54bc5357188783 | 53.3 | 78.9 | 48.1 | 37.8 | 54.5 |
| xiaol/RWKV-v4-raven-14B-one-state | 1f41a1253b47c5fa4dc71ae118d32de9178d9def | 45.7 | 71.5 | 33.5 | 37.3 | 47 |
| IDEA-CCNL/Ziya-LLaMA-13B-v1 | fccf34387d2c9f2f95ff59ae380e6de3718e41ff | 27.7 | 26 | 27 | 48.6 | 32.3 |
| IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1 | 826e83e411df32f358893ab21f5eae680499ae9a | 28 | 26 | 27 | 48.6 | 32.4 |
| chaoyi-wu/MedLLaMA_13B | 893557ef32f98cd01deb1c5d063be6d640ffa657 | 54.3 | 78.5 | 46.4 | 40.5 | 54.9 |
| huggyllama/llama-65b | 49707c5313d34d1c5a846e29cf2a2a650c22c8ee | 63.5 | 86.1 | 63.9 | 43.4 | 64.2 |
| huggyllama/llama-30b | 2b1edcdb3c7ced7bce6c1aa75c94545777c3118b | 61.4 | 84.7 | 58.4 | 42.3 | 61.7 |
| kevinpro/Vicuna-13B-CoT | 346e3c46959cf9f1e03feffa761afe020c0fb6a8 | 52.7 | 80.1 | 51.9 | 52.1 | 59.2 |
| yhyhy3/open_llama_7b_v2_med_instruct | cabb47abd422a2d67161e2d038265ee23be45fb8 | 46.5 | 76.9 | 42.3 | 40.3 | 51.5 |
| yhyhy3/med-orca-instruct-33b | 1d636881854338e571825226c712180da06be72c | 28.8 | 25.6 | 26.5 | 49.3 | 32.6 |
| AI-Sweden-Models/gpt-sw3-20b | 36797b7835a9e656af456e0006465a3af48735fc | 41.8 | 68.8 | 28.5 | 37.1 | 44 |
| AI-Sweden-Models/gpt-sw3-20b-instruct | 006477ad4c4875611f20cd927f1fd76bbf5ba5ba | 43.2 | 71.1 | 31.3 | 41 | 46.6 |
| AI-Sweden-Models/gpt-sw3-1.3b | b0d9545a27cfaf9a937adac72ed6953f2dc597de | 30.4 | 50.4 | 26.1 | 40 | 36.7 |
| AI-Sweden-Models/gpt-sw3-1.3b-instruct | 5f2f03167dedc59192ee02694e07424a890d9206 | 31 | 51.4 | 26.2 | 40.3 | 37.2 |
| AI-Sweden-Models/gpt-sw3-6.7b-v2 | 7a7f93d4318658b354c5411cde64e9f0121f6b1f | 39.4 | 66.4 | 30.1 | 35.6 | 42.9 |
| AI-Sweden-Models/gpt-sw3-6.7b | 7b20cb87e793e1b73b6a73da5261c6010f2b5410 | 36.3 | 60.8 | 26 | 39 | 40.5 |
| AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct | 81ca95a4e93746240994d1e6797ffa64dc796bd9 | 40.8 | 67.8 | 31.6 | 40.3 | 45.1 |
| SebastianSchramm/Cerebras-GPT-111M-instruction | 09f1ec782ae2243fc605b24eb13ec8d5e4fd2734 | 24.4 | 26.1 | 25.9 | 49.5 | 31.5 |
| Dampish/StellarX-4B-V0 | 0a79832bd57a8cdadc61626fb77bdc26c85b9fa4 | 36.9 | 61.9 | 26.9 | 34.3 | 40 |
| OptimalScale/robin-7b-v2-delta | 85eef39d89c100d860e53ff915ad3ab9668e1d1e | 49.1 | 74.4 | 39 | 42.3 | 51.2 |
| OptimalScale/robin-13b-v2-delta | 54c56605e22c731fc1d51273f7e18fc019b20436 | 56.6 | 80.4 | 48.4 | 50.5 | 59 |
| OptimalScale/robin-65b-v2-delta | cde761c8c5e956a4d981d396f993f46971ea2cd4 | 60.8 | 81.6 | 60.8 | 44.7 | 62 |
| vikp/phi2 | 9fd01ce09da870fc66af88616d43e53db642ef46 | 22.9 | 30.7 | 27.5 | 46.1 | 31.8 |
| aiplanet/panda-coder-13B | 823a8320224cdac88e927aee00338ffa79395faa | 22.7 | 25 | 23.1 | null | null |
| aiplanet/effi-13b | 1b4b4c72dd41ddc1a80f2db6c85170e50a91ed7a | 53.3 | 81.2 | 53.6 | 44.9 | 58.2 |
| aiplanet/effi-7b | d58c62ee27cae60392bd0bd53e1fd05ea82e273b | 55.1 | 78.1 | 35.9 | 39.7 | 52.2 |
| player1537/dolphinette | 20529d47b0a82343014727edd1639a9a6a6b09e6 | 24.9 | 37.3 | 25.4 | 42.1 | 32.4 |
| posicube/Llama2-chat-AYB-13B | cc7ca1b8f906b9f62ace094540f4ff4124dd581a | 63.4 | 84.8 | 59.3 | 55.6 | 65.8 |
| posicube/Llama2-chat-AYT-13B | dd12dced8076a959c03b8b5c4a4266f234d6639a | 63.3 | 83.5 | 59.7 | 55.8 | 65.6 |
| posicube/Llama-chat-AY-13B | 66037b5ee553f7b878d796d2b2d5ada5734cc164 | 62.8 | 83.2 | 60 | 55.9 | 65.5 |
| ariellee/SuperPlatty-30B | 017e1c32bca060107337dbf26db2044a7caa56f2 | 65.8 | 83.9 | 62.6 | 53.5 | 66.4 |
| malhajar/Platypus2-70B-instruct-4bit-gptq | 2aa2f5646e496b3cd9b510681ba2c5081bde821f | 29 | 26 | 23.5 | 49.6 | 32 |
| KoboldAI/OPT-13B-Nerybus-Mix | c27a7e2360dd313406719980851e89abf46ebb13 | 39.8 | 70.6 | 24.9 | 34 | 42.3 |
| KoboldAI/OPT-6.7B-Erebus | 9c4d1af96f93224e01d2f69c303fc6d6f686bdcc | 39.2 | 68.7 | 24.6 | 35.1 | 41.9 |
| KoboldAI/GPT-J-6B-Shinen | afa5a11b24cb23eee708e17c83b920a788e9e07b | 39.8 | 67.1 | 27.7 | 36.9 | 42.9 |
| KoboldAI/OPT-13B-Erebus | 8a949353677d2b971910a6c4afcc70e95d838c2a | 40 | 70.1 | 25.3 | 34.9 | 42.6 |
| KoboldAI/OPT-2.7B-Erebus | 39ca914ceb82f7f14a38484023bc04f0cd5d0a8d | 34.4 | 60.9 | 26.7 | 37.8 | 40 |
| KoboldAI/OPT-350M-Nerys-v2 | 59b1019c35ab17a7d77ea1ad32b45a8375ba6e89 | 23.6 | 35.5 | 25.9 | 42.1 | 31.8 |
| KoboldAI/fairseq-dense-1.3B | 20bf1732212ea81adb45b782a25ce69e65a01ad2 | 31.1 | 58.4 | 25 | 37.4 | 38 |
| KoboldAI/OPT-13B-Nerys-v2 | b0aa4f3630356f7801ca083c00b03d03da13b8bb | 39.7 | 70.5 | 25.4 | 33.5 | 42.3 |
| KoboldAI/OPT-6.7B-Nerybus-Mix | 9afe4dca5a9dbd71cb90d1050d142837f4c739f6 | 39.2 | 68.6 | 24.5 | 34.8 | 41.8 |
| KoboldAI/fairseq-dense-13B | 785793f6b216afd9fc664fc63e8e6c776a016825 | 40.4 | 75.5 | 27.1 | 32.8 | 44 |
| KoboldAI/GPT-J-6B-Skein | acfe27303f74129930fef5e6fadbc5f58c6b8590 | 42.6 | 68.7 | 24.9 | 38.7 | 43.7 |
| KoboldAI/fairseq-dense-2.7B | 4201f4b101bad2992efc8452009317a354ec52d2 | 33.8 | 65.7 | 26.4 | 34.6 | 40.1 |
| KoboldAI/PPO_Pygway-6b-Mix | b31d25819e00d5031ccdb22a9584f0850dcfe39c | 41.8 | 67.8 | 28.4 | 32.5 | 42.6 |
| KoboldAI/LLaMA2-13B-Tiefighter | 0d193a4562d6836724485cb7df6e58ca846bbfeb | 59.9 | 84 | 55 | 53 | 63 |
| KoboldAI/OPT-2.7B-Nerybus-Mix | b4131723cfff1fa42f6cbab546c5b4bb0d19fd83 | 33.7 | 61.2 | 26.6 | 37.6 | 39.8 |
| Mohammed-Altaf/Medical-ChatBot | 9e2d5d7a6189762164690a2fe714b00ce497b253 | 30.5 | 38.5 | 25.9 | 41 | 34 |
| porkorbeef/Llama-2-13b-sf | 06253ee259e6b205c4734ab6ec3fa850737b2110 | 29.5 | 26.5 | 26 | 49 | 32.8 |
| porkorbeef/Llama-2-13b-12_153950 | ee9b0cf26f521b5cb2322d743880e8b6bfadb0b7 | 28.6 | 26.6 | 20.8 | 49 | 31.2 |
| ehartford/Samantha-1.1-70b | a3819d186f5b4d52ced7ddeb7fa16bf66e8a2ea7 | 68.8 | 87.5 | 68.6 | 64.8 | 72.4 |
| ehartford/WizardLM-30B-Uncensored | 761783745fcb97831ad8035d3cbd5de484aca3ce | 60.2 | 82.9 | 56.8 | 51.6 | 62.9 |
| ehartford/CodeLlama-34b-Instruct-hf | 50ac374da09ab585b9cf7625a2ea3554ef97f18a | 40.8 | 35.7 | 39.7 | 44.3 | 40.1 |
| ehartford/dolphin-llama-13b | b6d16c3e1cffef5e914863f41fd96152dafddd6f | 55.5 | 77.1 | 52.2 | 52.2 | 59.2 |
| ehartford/dolphin-2.0-mistral-7b | c673387016c622fd0a707426953c03957398bc37 | 59.2 | 80.3 | 56.9 | 61.1 | 64.4 |
| ehartford/samantha-mistral-7b | 7f9e40543fdff8c3e58eca0390c8a631829c1206 | 63.4 | 84.1 | 61.4 | 46.1 | 63.8 |
| ehartford/dolphin-llama2-7b | 85aa4f67191fd016ab7ea8c389fddb5d9e5a9a52 | 46.6 | 67.5 | 48.4 | 49.7 | 53 |
| ehartford/dolphin-2.1-mistral-7b | aa5bd48c8b3040d1155a8fd59328df160aa63680 | 64 | 85 | 63.4 | 55.6 | 67 |
| ehartford/WizardLM-7B-Uncensored | 14c23f9fa775ab5ce49010418f00df06d92b0b13 | 47.9 | 73.1 | 35.4 | 41.5 | 49.5 |
| ehartford/Samantha-1.11-CodeLlama-34b | 3fd110de9282e52f56f999bf1da1a76425f00e29 | 56.6 | 75.5 | 53.5 | 50.5 | 59 |
| ehartford/minotaur-llama2-13b-qlora | 22c83f7d68e547fb0b59acfa01c60b108c59fe55 | 60.1 | 82.4 | 55.9 | 45.6 | 61 |
| ehartford/dolphin-2.2.1-mistral-7b | 001b48e9aebffb395c698af47b6b48364cc3cbe8 | 63.5 | 83.9 | 63.3 | 53.2 | 66 |
| ehartford/Samantha-1.11-7b | 730cbd8f3077f3d24001aab714def991f1e4e7e8 | 55 | 79.1 | 40.5 | 50.4 | 56.2 |
| ehartford/Wizard-Vicuna-13B-Uncensored | 95bfd1640a54e76b3e857c2462fd3a77eca0b275 | 59 | 81.9 | 47.9 | 51.7 | 60.1 |
| ehartford/WizardLM-13B-Uncensored | 9025c5f96fef9525da9238369ad082961b0e9494 | 50.9 | 76.6 | 44 | 46.7 | 54.6 |
| SciPhi/SciPhi-Self-RAG-Mistral-7B-32k | 640192e2ba5898f87c407a9f771fc270f7628dee | 57.3 | 80.4 | 60.8 | 45.6 | 61 |
| lmsys/vicuna-13b-v1.1 | 8c71dbe9221e83d2ec72e4dc08beccfc78b563c0 | 52.7 | 80.1 | 51.9 | 52.1 | 59.2 |
| lmsys/vicuna-7b-delta-v1.1 | | 53.7 | 77.5 | 45.6 | 48.9 | 56.4 |
| lmsys/vicuna-13b-v1.5 | 3deb0106f72a3a433f0c6ea0cb978bdf14bcd3a6 | 57.1 | 81.2 | 56.7 | 51.5 | 61.6 |
| lmsys/vicuna-7b-v1.3 | ac066c83424c4a7221aa10c0ebe074b24d3bcdb6 | 50.4 | 76.9 | 48.1 | 47 | 55.6 |
| lmsys/vicuna-13b-delta-v1.1 | ffed4c7cf1b9814812078efbe29ec3f610ea39e7 | 52.7 | 80.1 | 51.9 | 52.1 | 59.2 |
| lmsys/vicuna-13b-v1.3 | 7900eeb715a49affee9e6390f824e62eea3f3fb1 | 54.6 | 80.4 | 52.9 | 52.1 | 60 |
| lmsys/vicuna-7b-v1.5-16k | 9a93d7d11fac7f3f9074510b80092b53bc1a5bec | 54.2 | 77.3 | 49.3 | 50.3 | 57.8 |
| lmsys/vicuna-7b-v1.5 | de56c35b1763eaae20f4d60efd64af0a9091ebe5 | 53.2 | 77.4 | 51 | 50.3 | 58 |
| lmsys/longchat-7b-v1.5-32k | 16deb633ef4d6a18d5750239edc5a85ffeaf3918 | 51.7 | 75 | 43.2 | 44.4 | 53.6 |
| Rardilit/Panther_v1 | c47493294aa5154feb72bcba31d7e99cbe02d4fa | 22.7 | 25 | 23.1 | null | null |
| robowaifudev/megatron-gpt2-345m | b39f8d00fb9f33da4271be2035da848da896a23b | 24.2 | 39.2 | 24.3 | 41.5 | 32.3 |
| KnutJaegersberg/webMistral-7B | 0b221c617df3d2f883cfd925f646ebd93de23037 | 59 | 80.9 | 59 | 39.7 | 59.6 |
| KnutJaegersberg/gpt-2-xl-EvolInstruct | 3e68735b9bfbca5c2e6a8e4367f003ab3d3c1512 | 27.4 | 38.5 | 25.7 | 42.8 | 33.6 |
| KnutJaegersberg/RWKV-pileplus-1B5-evol_instruct_v2 | cb5582403f78f09973291980da56d7636516545a | 31.8 | 55.5 | 25.1 | 35.2 | 36.9 |
| KnutJaegersberg/galactica-orca-wizardlm-1.3b | 4c0294934ecafb9ee6ec120b17f7ef81c2e1240b | 30.9 | 36 | 25.9 | 41.3 | 33.5 |
| KnutJaegersberg/RWKV-4-PilePlus-430M-20230520-6162-1018Gtokens-ctx4098 | e31777c9d3b8c5c9f803b23f49550c009cbdcf6d | 26 | 40.4 | 24.4 | 37.6 | 32.1 |
| KnutJaegersberg/black_goo_recipe_a | 7067f68d4d9e7b10a1aa2c9fa97456bc04678867 | 38.1 | 66.6 | 25.8 | 37.5 | 42 |
| KnutJaegersberg/MistralInstructLongish | 813c4707970cb5bf3e2a49f7f350af59e7032c24 | 60.8 | 81.9 | 60.5 | 40.6 | 61 |
| KnutJaegersberg/megatron-gpt2-345m-evol_instruct_v2 | 2866eeaaf62014a7a6e939d18b6e27f44df48428 | 26.4 | 38.4 | 23.6 | 41.2 | 32.4 |
| KnutJaegersberg/Galactica-6.7B-EssayWriter | ac74fdd938de1ffd34832d66a25db20b0230983e | 40.1 | 50.3 | 33.9 | 40.3 | 41.2 |
| KnutJaegersberg/openllama_3b_EvolInstruct_lora_merged | c55e3e114951346f273c519d266170e4d52781e9 | 40.3 | 71.6 | 27.1 | 34.8 | 43.4 |
| HanningZhang/Robin-v2 | | 48.8 | 74.5 | 39.3 | 42.3 | 51.2 |
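The aggregate score tracks the unweighted mean of the four benchmark values in results, up to rounding of the displayed figures: for CalderaAI/30B-Epsilon, (63.1 + 83.6 + 56.9 + 59) / 4 = 65.65, against the reported 65.6. A small check along those lines (the helper below is ours, not part of any published tooling):

```python
# Recompute the aggregate score as the unweighted mean of the four benchmark
# values; rows with a null benchmark (e.g. aiplanet/panda-coder-13B,
# Rardilit/Panther_v1) also report a null score.
def mean_score(results: dict) -> float | None:
    values = list(results.values())
    if any(v is None for v in values):
        return None
    return sum(values) / len(values)

row = {"arc:challenge": 63.1, "hellaswag": 83.6, "hendrycksTest": 56.9, "truthfulqa:mc": 59}
print(mean_score(row))  # ~65.65; the table reports 65.6 for CalderaAI/30B-Epsilon
```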