{"Model": "PartAI/Dorna-Llama3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 33.88, "ARC Easy": 70.40, "ARC Challenge": 61.07, "MMLU Pro": 23.39, "AUT Multiple Choice Persian": 52.86, "Hub License": "llama3", "Model sha": "main", "model_name_for_query": "PartAI/Dorna-Llama3-8B-Instruct"}
{"Model": "meta-llama/Llama-3.2-1B-Instruct", "Precision": "bfloat16", "#Params (B)": 1.24, "Part Multiple Choice": 28.28, "ARC Easy": 47.10, "ARC Challenge": 39.00, "MMLU Pro": 12.17, "AUT Multiple Choice Persian": 36.88, "Hub License": "llama3.2", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.2-1B-Instruct"}
{"Model": "meta-llama/Llama-3.1-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 36.68, "ARC Easy": 78.40, "ARC Challenge": 60.40, "MMLU Pro": 21.00, "AUT Multiple Choice Persian": 54.24, "Hub License": "llama3.1", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.1-8B-Instruct"}
{"Model": "meta-llama/Meta-Llama-3-8B-Instruct", "Precision": "bfloat16", "#Params (B)": 8.03, "Part Multiple Choice": 34.99, "ARC Easy": 72.90, "ARC Challenge": 57.70, "MMLU Pro": 25.54, "AUT Multiple Choice Persian": 53.85, "Hub License": "llama3", "Model sha": "main", "model_name_for_query": "meta-llama/Meta-Llama-3-8B-Instruct"}
{"Model": "CohereForAI/aya-23-8B", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 32.82, "ARC Easy": 80.46, "ARC Challenge": 64.43, "MMLU Pro": 18.62, "AUT Multiple Choice Persian": 52.86, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-23-8B"}
{"Model": "CohereForAI/aya-23-35B", "Precision": "float16", "#Params (B)": 35, "Part Multiple Choice": 36.79, "ARC Easy": 87.93, "ARC Challenge": 72.48, "MMLU Pro": 25.54, "AUT Multiple Choice Persian": 61.14, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-23-35B"}
{"Model": "CohereForAI/aya-expanse-8b", "Precision": "float16", "#Params (B)": 8.03, "Part Multiple Choice": 34.91, "ARC Easy": 79.60, "ARC Challenge": 70.47, "MMLU Pro": 25.06, "AUT Multiple Choice Persian": 58.38, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-expanse-8b"}
{"Model": "CohereForAI/aya-expanse-32b", "Precision": "float16", "#Params (B)": 32.3, "Part Multiple Choice": 43.36, "ARC Easy": 93.10, "ARC Challenge": 79.87, "MMLU Pro": 31.03, "AUT Multiple Choice Persian": 62.33, "Hub License": "cc-by-nc-4.0", "Model sha": "main", "model_name_for_query": "CohereForAI/aya-expanse-32b"}
{"Model": "Qwen/Qwen2-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 35.90, "ARC Easy": 77.30, "ARC Challenge": 68.46, "MMLU Pro": 23.87, "AUT Multiple Choice Persian": 51.68, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2-7B-Instruct"}
{"Model": "Qwen/Qwen2.5-7B-Instruct", "Precision": "bfloat16", "#Params (B)": 7.62, "Part Multiple Choice": 36.72, "ARC Easy": 79.02, "ARC Challenge": 69.13, "MMLU Pro": 21.96, "AUT Multiple Choice Persian": 52.66, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2.5-7B-Instruct"}
{"Model": "Qwen/Qwen2.5-32B-Instruct", "Precision": "bfloat16", "#Params (B)": 32.8, "Part Multiple Choice": 46.06, "ARC Easy": 90.80, "ARC Challenge": 85.91, "MMLU Pro": 38.19, "AUT Multiple Choice Persian": 61.34, "Hub License": "apache-2.0", "Model sha": "main", "model_name_for_query": "Qwen/Qwen2.5-32B-Instruct"}
{"Model": "google/gemma-2-2b-it", "Precision": "bfloat16", "#Params (B)": 2.61, "Part Multiple Choice": 31.12, "ARC Easy": 71.26, "ARC Challenge": 57.72, "MMLU Pro": 16.23, "AUT Multiple Choice Persian": 49.90, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-2b-it"}
{"Model": "google/gemma-2-9b-it", "Precision": "bfloat16", "#Params (B)": 9.24, "Part Multiple Choice": 42.70, "ARC Easy": 93.10, "ARC Challenge": 84.56, "MMLU Pro": 31.74, "AUT Multiple Choice Persian": 62.33, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-9b-it"}
{"Model": "google/gemma-2-27b-it", "Precision": "bfloat16", "#Params (B)": 27.2, "Part Multiple Choice": 46.03, "ARC Easy": 95.98, "ARC Challenge": 85.91, "MMLU Pro": 36.28, "AUT Multiple Choice Persian": 63.12, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-27b-it"}
{"Model": "universitytehran/PersianMind-v1.0", "Precision": "bfloat16", "#Params (B)": 6.82, "Part Multiple Choice": 29.27, "ARC Easy": 58.91, "ARC Challenge": 48.32, "MMLU Pro": 15.51, "AUT Multiple Choice Persian": 45.36, "Hub License": "cc-by-nc-sa-4.0", "Model sha": "main", "model_name_for_query": "universitytehran/PersianMind-v1.0"}
{"Model": "MaralGPT/Maral-7B-alpha-1", "Precision": "bfloat16", "#Params (B)": 7.24, "Part Multiple Choice": 26.67, "ARC Easy": 44.54, "ARC Challenge": 30.87, "MMLU Pro": 15.99, "AUT Multiple Choice Persian": 36.09, "Hub License": "mit", "Model sha": "main", "model_name_for_query": "MaralGPT/Maral-7B-alpha-1"}