Update leaderboard_data.jsonl
leaderboard_data.jsonl CHANGED (+16 -16)

@@ -1,16 +1,16 @@
-{"Model": "PartAI/Dorna-Llama3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 33.88, "ARC Easy": 70.40, "ARC Challenge": 61.07, "MMLU Pro": 23.39, "
-{"Model":"meta-llama/Llama-3.2-1B-Instruct", "Precision": "bfloat16", "#Params (B)": 1.24, "Part Multiple Choice": 28.28, "ARC Easy": 47.10, "ARC Challenge": 39.00, "MMLU Pro": 12.17, "
-{"Model":"meta-llama/Llama-3.1-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 36.68, "ARC Easy": 78.40, "ARC Challenge": 60.40, "MMLU Pro": 21.00, "
-{"Model":"meta-llama/Meta-Llama-3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 34.99, "ARC Easy": 72.90, "ARC Challenge": 57.70, "MMLU Pro": 25.54, "
-{"Model":"CohereForAI/aya-23-8B", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 32.82, "ARC Easy": 80.46, "ARC Challenge": 64.43, "MMLU Pro": 18.62, "
-{"Model":"CohereForAI/aya-23-35B", "Precision": "float16", "#Params (B)": 35, "Part Multiple Choice": 36.79, "ARC Easy": 87.93, "ARC Challenge": 72.48, "MMLU Pro": 25.54, "
-{"Model":"CohereForAI/aya-expanse-8b", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 34.91, "ARC Easy": 79.60, "ARC Challenge": 70.47, "MMLU Pro": 25.06, "
-{"Model":"CohereForAI/aya-expanse-32b", "Precision": "float16", "#Params (B)": 32.3, "Part Multiple Choice": 43.36, "ARC Easy": 93.10, "ARC Challenge": 79.87, "MMLU Pro": 31.03, "
-{"Model":"Qwen/Qwen2-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 35.90, "ARC Easy": 77.30, "ARC Challenge": 68.46, "MMLU Pro": 23.87, "
-{"Model":"Qwen/Qwen2.5-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 36.72, "ARC Easy": 79.02, "ARC Challenge": 69.13, "MMLU Pro": 21.96, "
-{"Model":"Qwen/Qwen2.5-32B-Instruct", "Precision": "bfloat16", "#Params (B)": 32.8, "Part Multiple Choice": 46.06, "ARC Easy": 90.80, "ARC Challenge": 85.91, "MMLU Pro": 38.19, "
-{"Model":"google/gemma-2-2b-it", "Precision": "bfloat16", "#Params (B)": 2.61, "Part Multiple Choice": 31.12, "ARC Easy": 71.26, "ARC Challenge": 57.72, "MMLU Pro": 16.23, "
-{"Model":"google/gemma-2-9b-it", "Precision": "bfloat16", "#Params (B)": 9.24, "Part Multiple Choice": 42.70, "ARC Easy": 93.10, "ARC Challenge": 84.56, "MMLU Pro": 31.74, "
-{"Model":"google/gemma-2-27b-it", "Precision": "bfloat16", "#Params (B)": 27.2, "Part Multiple Choice": 46.03, "ARC Easy": 95.98, "ARC Challenge": 85.91, "MMLU Pro": 36.28, "
-{"Model":"universitytehran/PersianMind-v1.0", "Precision": "bfloat16", "#Params (B)": 6.82, "Part Multiple Choice": 29.27, "ARC Easy": 58.91, "ARC Challenge": 48.32, "MMLU Pro": 15.51, "
-{"Model":"MaralGPT/Maral-7B-alpha-1", "Precision": "bfloat16", "#Params (B)": 7.24, "Part Multiple Choice": 26.67, "ARC Easy": 44.54, "ARC Challenge": 30.87, "MMLU Pro": 15.99, "
+{"Model": "PartAI/Dorna-Llama3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 33.88, "ARC Easy": 70.40, "ARC Challenge": 61.07, "MMLU Pro": 23.39, "Multiple Choice Persian": 52.86, "Hub License": "llama3", "Model sha": "main", "model_name_for_query": "PartAI/Dorna-Llama3-8B-Instruct"}
+{"Model":"meta-llama/Llama-3.2-1B-Instruct", "Precision": "bfloat16", "#Params (B)": 1.24, "Part Multiple Choice": 28.28, "ARC Easy": 47.10, "ARC Challenge": 39.00, "MMLU Pro": 12.17, "Multiple Choice Persian": 36.88, "Hub License": "llama3.2", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.2-1B-Instruct"}
+{"Model":"meta-llama/Llama-3.1-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 36.68, "ARC Easy": 78.40, "ARC Challenge": 60.40, "MMLU Pro": 21.00, "Multiple Choice Persian": 54.24, "Hub License": "llama3.1", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.1-8B-Instruct"}
+{"Model":"meta-llama/Meta-Llama-3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 34.99, "ARC Easy": 72.90, "ARC Challenge": 57.70, "MMLU Pro": 25.54, "Multiple Choice Persian": 53.85, "Hub License": "llama3", "Model sha": "main", "model_name_for_query": "meta-llama/Meta-Llama-3-8B-Instruct"}
+{"Model":"CohereForAI/aya-23-8B", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 32.82, "ARC Easy": 80.46, "ARC Challenge": 64.43, "MMLU Pro": 18.62, "Multiple Choice Persian": 52.86, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-23-8B"}
+{"Model":"CohereForAI/aya-23-35B", "Precision": "float16", "#Params (B)": 35, "Part Multiple Choice": 36.79, "ARC Easy": 87.93, "ARC Challenge": 72.48, "MMLU Pro": 25.54, "Multiple Choice Persian": 61.14, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-23-35B"}
+{"Model":"CohereForAI/aya-expanse-8b", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 34.91, "ARC Easy": 79.60, "ARC Challenge": 70.47, "MMLU Pro": 25.06, "Multiple Choice Persian": 58.38, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-expanse-8b"}
+{"Model":"CohereForAI/aya-expanse-32b", "Precision": "float16", "#Params (B)": 32.3, "Part Multiple Choice": 43.36, "ARC Easy": 93.10, "ARC Challenge": 79.87, "MMLU Pro": 31.03, "Multiple Choice Persian": 62.33, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-expanse-32b"}
+{"Model":"Qwen/Qwen2-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 35.90, "ARC Easy": 77.30, "ARC Challenge": 68.46, "MMLU Pro": 23.87, "Multiple Choice Persian": 51.68, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2-7B-Instruct"}
+{"Model":"Qwen/Qwen2.5-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 36.72, "ARC Easy": 79.02, "ARC Challenge": 69.13, "MMLU Pro": 21.96, "Multiple Choice Persian": 52.66, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2.5-7B-Instruct"}
+{"Model":"Qwen/Qwen2.5-32B-Instruct", "Precision": "bfloat16", "#Params (B)": 32.8, "Part Multiple Choice": 46.06, "ARC Easy": 90.80, "ARC Challenge": 85.91, "MMLU Pro": 38.19, "Multiple Choice Persian": 61.34, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2.5-32B-Instruct"}
+{"Model":"google/gemma-2-2b-it", "Precision": "bfloat16", "#Params (B)": 2.61, "Part Multiple Choice": 31.12, "ARC Easy": 71.26, "ARC Challenge": 57.72, "MMLU Pro": 16.23, "Multiple Choice Persian": 49.90, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-2b-it"}
+{"Model":"google/gemma-2-9b-it", "Precision": "bfloat16", "#Params (B)": 9.24, "Part Multiple Choice": 42.70, "ARC Easy": 93.10, "ARC Challenge": 84.56, "MMLU Pro": 31.74, "Multiple Choice Persian": 62.33, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-9b-it"}
+{"Model":"google/gemma-2-27b-it", "Precision": "bfloat16", "#Params (B)": 27.2, "Part Multiple Choice": 46.03, "ARC Easy": 95.98, "ARC Challenge": 85.91, "MMLU Pro": 36.28, "Multiple Choice Persian": 63.12, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-27b-it"}
+{"Model":"universitytehran/PersianMind-v1.0", "Precision": "bfloat16", "#Params (B)": 6.82, "Part Multiple Choice": 29.27, "ARC Easy": 58.91, "ARC Challenge": 48.32, "MMLU Pro": 15.51, "Multiple Choice Persian": 45.36, "Hub License": "cc-by-nc-sa-4.0", "Model sha": "main", "model_name_for_query": "universitytehran/PersianMind-v1.0"}
+{"Model":"MaralGPT/Maral-7B-alpha-1", "Precision": "bfloat16", "#Params (B)": 7.24, "Part Multiple Choice": 26.67, "ARC Easy": 44.54, "ARC Challenge": 30.87, "MMLU Pro": 15.99, "Multiple Choice Persian": 36.09, "Hub License": "mit", "Model sha": "main", "model_name_for_query": "MaralGPT/Maral-7B-alpha-1"}
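
For reference, each line of leaderboard_data.jsonl is a standalone JSON object: model identifier, precision, parameter count, four benchmark scores (Part Multiple Choice, ARC Easy, ARC Challenge, MMLU Pro), a Multiple Choice Persian score, license, revision sha, and a query key. Below is a minimal sketch of how such a file could be loaded and ranked in Python; the averaging of the four benchmark columns is a hypothetical illustration for sorting, not necessarily the aggregation this Space uses, and the file path is assumed.

import json

# Benchmark columns to average (an assumption for illustration only).
SCORE_COLUMNS = ["Part Multiple Choice", "ARC Easy", "ARC Challenge", "MMLU Pro"]

def load_leaderboard(path: str) -> list[dict]:
    """Parse one JSON object per line of a JSONL file, skipping blank lines."""
    rows = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                rows.append(json.loads(line))
    return rows

def rank(rows: list[dict]) -> list[dict]:
    """Sort descending by the mean of the benchmark columns (assumed metric)."""
    return sorted(
        rows,
        key=lambda r: sum(r[c] for c in SCORE_COLUMNS) / len(SCORE_COLUMNS),
        reverse=True,
    )

if __name__ == "__main__":
    for row in rank(load_leaderboard("leaderboard_data.jsonl")):
        avg = sum(row[c] for c in SCORE_COLUMNS) / len(SCORE_COLUMNS)
        print(f"{row['Model']:45s} {avg:6.2f}")

Because JSONL is line-delimited, a change to one model's entry touches exactly one line, which is why this commit shows a clean +16 -16 diff when a field is updated across all sixteen entries.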