vlm_results / Mistral-Small-3.1-24B-Instruct-2503 / results_2025-01-25T10-42-53.190540.json
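The raw results JSON is reproduced in full below. For convenience, a minimal sketch of fetching this file with huggingface_hub follows; note that the repo id and the in-repo path are assumptions inferred from the header above, not confirmed by this file:

from huggingface_hub import hf_hub_download

# Assumption: "daiteng01/vlm_results" is the dataset repo id and the file lives under a
# folder named after the model; adjust both if the actual repository layout differs.
local_path = hf_hub_download(
    repo_id="daiteng01/vlm_results",
    filename="Mistral-Small-3.1-24B-Instruct-2503/results_2025-01-25T10-42-53.190540.json",
    repo_type="dataset",
)
print(local_path)
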
{
"config_general": {
"model_name": "Mistral-Small-3.1-24B-Instruct-2503",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 54,
"accuracy": 61.36
},
"overall": {
"num": 900,
"correct": 391,
"accuracy": 43.44
},
"商业": {
"num": 126,
"correct": 40,
"accuracy": 31.75
},
"科学": {
"num": 204,
"correct": 77,
"accuracy": 37.75
},
"健康与医学": {
"num": 153,
"correct": 71,
"accuracy": 46.41
},
"人文社会科学": {
"num": 85,
"correct": 44,
"accuracy": 51.76
},
"技术与工程": {
"num": 244,
"correct": 105,
"accuracy": 43.03
},
"accuracy": 43.44,
"acc_stderr": 0,
"acc": 43.44
},
"MMMU": {
"accuracy": 53.78,
"subject_score": {
"Accounting": 46.67,
"Agriculture": 56.67,
"Architecture": 40.0,
"Art": 73.33,
"Basic": 70.0,
"Biology": 46.67,
"Chemistry": 30.0,
"Clinical": 73.33,
"Computer": 43.33,
"Design": 90.0,
"Diagnostics": 43.33,
"Economics": 56.67,
"Electronics": 30.0,
"Energy": 40.0,
"Finance": 33.33,
"Geography": 63.33,
"History": 66.67,
"Literature": 86.67,
"Manage": 56.67,
"Marketing": 66.67,
"Materials": 33.33,
"Math": 33.33,
"Mechanical": 36.67,
"Music": 30.0,
"Pharmacy": 56.67,
"Physics": 40.0,
"Psychology": 60.0,
"Public": 73.33,
"Sociology": 63.33
},
"difficulty_score": {
"Medium": 50.71,
"Easy": 66.44,
"Hard": 40.33
},
"acc_stderr": 0,
"acc": 53.78
},
"MMMU_Pro_standard": {
"accuracy": 38.96,
"subject_score": {
"History": 46.43,
"Art": 60.38,
"Design": 58.33,
"Literature": 69.23,
"Agriculture": 26.67,
"Finance": 28.33,
"Sociology": 59.26,
"Accounting": 27.59,
"Energy_and_Power": 25.86,
"Pharmacy": 52.63,
"Architecture_and_Engineering": 30.0,
"Clinical_Medicine": 40.68,
"Public_Health": 39.66,
"Physics": 21.67,
"Art_Theory": 54.55,
"Electronics": 45.0,
"Psychology": 35.0,
"Biology": 40.68,
"Manage": 36.0,
"Economics": 49.15,
"Mechanical_Engineering": 33.9,
"Diagnostics_and_Laboratory_Medicine": 28.33,
"Basic_Medical_Science": 40.38,
"Computer_Science": 46.67,
"Math": 23.33,
"Music": 26.67,
"Materials": 25.0,
"Marketing": 32.2,
"Chemistry": 31.67,
"Geography": 44.23
},
"difficulty_score": {
"Medium": 36.33,
"Easy": 49.05,
"Hard": 30.92
},
"acc_stderr": 0,
"acc": 38.96
},
"MMMU_Pro_vision": {
"accuracy": 40.17,
"subject_score": {
"History": 50.0,
"Art": 49.06,
"Design": 51.67,
"Literature": 67.31,
"Agriculture": 35.0,
"Finance": 56.67,
"Sociology": 44.44,
"Accounting": 50.0,
"Energy_and_Power": 27.59,
"Pharmacy": 54.39,
"Architecture_and_Engineering": 25.0,
"Clinical_Medicine": 35.59,
"Public_Health": 56.9,
"Physics": 25.0,
"Art_Theory": 54.55,
"Electronics": 31.67,
"Psychology": 36.67,
"Biology": 35.59,
"Manage": 34.0,
"Economics": 54.24,
"Mechanical_Engineering": 30.51,
"Diagnostics_and_Laboratory_Medicine": 23.33,
"Basic_Medical_Science": 38.46,
"Computer_Science": 36.67,
"Math": 31.67,
"Music": 31.67,
"Materials": 25.0,
"Marketing": 47.46,
"Chemistry": 38.33,
"Geography": 32.69
},
"acc_stderr": 0,
"acc": 40.17
},
"MmvetV2": {
"accuracy": 63.4623,
"capability_scores": {
"ocr": 72.54807692307695,
"math": 80.0,
"spat": 61.37055837563451,
"rec": 58.93203883495154,
"know": 56.66666666666664,
"gen": 62.1818181818182,
"seq": 59.285714285714306
},
"capability_detail_scores": {
"ocr_math": 81.81818181818183,
"math_ocr_spat": 86.0,
"math_ocr_rec_spat": 40.0,
"rec_spat": 51.42857142857144,
"ocr_spat": 83.07692307692308,
"ocr_rec_spat": 37.5,
"know_ocr_spat": 100.0,
"ocr_rec": 62.5,
"know_rec_spat": 25.0,
"ocr": 85.3125,
"rec": 60.67796610169491,
"know_rec": 60.76923076923077,
"know_gen_rec": 56.09999999999997,
"know_gen_ocr_rec": 69.23076923076923,
"gen_ocr_rec_spat": 71.16279069767441,
"gen_ocr_spat": 75.0,
"ocr_spat_seq_math_gen": 100.0,
"ocr_spat_seq_rec_math": 50.0,
"gen_rec_spat": 58.63636363636364,
"math_gen_ocr_spat": 0.0,
"seq_rec_spat": 40.0,
"seq_ocr_rec_spat": 50.0,
"know_gen_rec_spat": 36.66666666666667,
"gen_rec": 66.17647058823529,
"know_ocr_rec_spat": 20.0,
"ocr_know_spat_rec_gen": 80.0,
"ocr_rec_math": 100.0,
"gen_ocr_rec": 92.0,
"seq_gen_ocr_rec": 62.85714285714287,
"gen_ocr": 61.53846153846154,
"seq_gen_rec": 67.14285714285714,
"seq_rec": 54.99999999999999,
"seq_gen_rec_spat": 61.24999999999999,
"know_seq_rec": 100.0,
"gen_seq_rec": 67.14285714285714,
"seq_know_gen_rec": 30.0,
"ocr_spat_seq_rec_gen": 33.33333333333333,
"ocr_know_seq_rec_gen": 90.0,
"know_rec_math": 100.0,
"seq_ocr_rec": 100.0
},
"acc_stderr": 0,
"acc": 63.4623
},
"MathVerse": {
"reject_info": {
"reject_rate": 0.3,
"reject_number": 12,
"total_question": 3940
},
"Vision Intensive": {
"accuracy": 27.01,
"correct": 212,
"total": 785
},
"Total": {
"accuracy": 28.51,
"correct": 1120,
"total": 3928
},
"Text Dominant": {
"accuracy": 34.95,
"correct": 274,
"total": 784
},
"Vision Dominant": {
"accuracy": 27.13,
"correct": 213,
"total": 785
},
"Vision Only": {
"accuracy": 24.62,
"correct": 194,
"total": 788
},
"Text Lite": {
"accuracy": 28.88,
"correct": 227,
"total": 786
},
"accuracy": 28.51,
"acc_stderr": 0,
"acc": 28.51
},
"Ocrlite": {
"final_score": [
1253,
1644
],
"accuracy": 76.217,
"Key Information Extraction-Bookshelf": [
32,
51,
0.627,
{
"Default": [
32,
51,
0.627
]
}
],
"Scene Text-centric VQA-diet_constraints": [
60,
90,
0.667,
{
"Default": [
60,
90,
0.667
]
}
],
"Doc-oriented VQA-Control": [
147,
189,
0.778,
{
"Default": [
147,
189,
0.778
]
}
],
"Doc-oriented VQA": [
178,
204,
0.873,
{
"Default": [
178,
204,
0.873
]
}
],
"Scene Text-centric VQA-Fake_logo": [
72,
119,
0.605,
{
"Default": [
72,
119,
0.605
]
}
],
"Handwritten Mathematical Expression Recognition": [
1,
100,
0.01,
{
"Default": [
1,
100,
0.01
]
}
],
"Key Information Extraction": [
187,
209,
0.895,
{
"Default": [
187,
209,
0.895
]
}
],
"Scene Text-centric VQA-Control": [
171,
200,
0.855,
{
"Default": [
171,
200,
0.855
]
}
],
"Scene Text-centric VQA": [
242,
282,
0.858,
{
"Default": [
242,
282,
0.858
]
}
],
"Artistic Text Recognition": [
36,
50,
0.72,
{
"Default": [
36,
50,
0.72
]
}
],
"Irregular Text Recognition": [
41,
50,
0.82,
{
"Default": [
41,
50,
0.82
]
}
],
"Non-Semantic Text Recognition": [
39,
50,
0.78,
{
"Default": [
39,
50,
0.78
]
}
],
"Regular Text Recognition": [
47,
50,
0.94,
{
"Default": [
47,
50,
0.94
]
}
],
"acc_stderr": 0,
"acc": 76.217
},
"OcrliteZh": {
"final_score": [
113,
234
],
"accuracy": 48.291,
"Docvqa": [
6,
10,
0.6,
{
"Default": [
6,
10,
0.6
]
}
],
"Chartqa-human": [
4,
10,
0.4,
{
"Default": [
4,
10,
0.4
]
}
],
"Chartqa-au": [
8,
10,
0.8,
{
"Default": [
8,
10,
0.8
]
}
],
"infographic": [
6,
10,
0.6,
{
"Default": [
6,
10,
0.6
]
}
],
"Key Information Extraction": [
30,
45,
0.667,
{
"Default": [
30,
45,
0.667
]
}
],
"Scene Text-centric VQA": [
23,
40,
0.575,
{
"Default": [
23,
40,
0.575
]
}
],
"Artistic Text Recognition": [
2,
11,
0.182,
{
"Default": [
2,
11,
0.182
]
}
],
"IrRegular Text Recognition": [
4,
11,
0.364,
{
"Default": [
4,
11,
0.364
]
}
],
"Non-semantic Text Recognition": [
2,
12,
0.167,
{
"Default": [
2,
12,
0.167
]
}
],
"Regular Text Recognition": [
6,
11,
0.545,
{
"Default": [
6,
11,
0.545
]
}
],
"Handwriting_CN": [
6,
20,
0.3,
{
"Default": [
6,
20,
0.3
]
}
],
"Chinese Unlimited": [
16,
44,
0.364,
{
"Default": [
16,
44,
0.364
]
}
],
"acc_stderr": 0,
"acc": 48.291
},
"CharXiv": {
"descriptive": {
"Overall Score": 81.97,
"By Question": {
"Q1": 83.61,
"Q2": 87.83,
"Q3": 78.54,
"Q4": 88.33,
"Q5": 90.79,
"Q6": 80.72,
"Q7": 84.19,
"Q8": 87.95,
"Q9": 84.58,
"Q10": 75.34,
"Q11": 57.71,
"Q12": 74.73,
"Q13": 70.32,
"Q14": 84.75,
"Q15": 90.73,
"Q16": 75.0,
"Q17": 63.84,
"Q18": 91.5,
"Q19": 93.85
},
"By Category": {
"Information Extraction": 84.88,
"Enumeration": 84.26,
"Pattern Recognition": 77.29,
"Counting": 78.12,
"Compositionality": 63.84
},
"By Subplot": {
"1 Subplot": 86.01,
"2-4 Subplots": 83.53,
"5+ Subplots": 72.88
},
"By Subject": {
"Computer Science": 81.55,
"Economics": 82.07,
"Electrical Engineering and Systems Science": 88.03,
"Mathematics": 82.22,
"Physics": 78.15,
"Quantitative Biology": 76.98,
"Quantitative Finance": 80.82,
"Statistics": 86.73
},
"By Year": {
"2020": 81.07,
"2021": 81.13,
"2022": 84.22,
"2023": 81.55
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 47.9,
"By Answer Type": {
"Text-in-Chart": 54.77,
"Text-in-General": 54.55,
"Number-in-Chart": 43.1,
"Number-in-General": 36.68
},
"By Source": {
"GPT-Sourced": 57.61,
"GPT-Inspired": 47.22,
"Completely Human": 45.17
},
"By Subject": {
"Computer Science": 48.41,
"Economics": 48.55,
"Electrical Engineering and Systems Science": 48.74,
"Mathematics": 52.59,
"Physics": 51.97,
"Quantitative Biology": 43.65,
"Quantitative Finance": 44.83,
"Statistics": 43.36
},
"By Year": {
"2020": 44.13,
"2021": 47.89,
"2022": 44.26,
"2023": 55.24
},
"By Subplot": {
"1 Subplot": 49.22,
"2-4 Subplots": 44.71,
"5+ Subplots": 50.85
},
"N_valid": 1000,
"N_invalid": 0,
"Question Type": "Reasoning"
},
"accuracy": 64.94,
"acc_stderr": 0,
"acc": 64.94
},
"MathVision": {
"accuracy": 33.75,
"acc_stderr": 0,
"acc": 33.75
},
"CII-Bench": {
"accuracy": 52.42,
"domain_score": {
"Life": 49.35,
"Art": 50.0,
"CTC": 55.56,
"Society": 56.76,
"Env.": 48.15,
"Politics": 54.17
},
"emotion_score": {
"Neutral": 55.64,
"Negative": 52.45,
"Positive": 48.72
},
"acc_stderr": 0,
"acc": 52.42
},
"Blink": {
"accuracy": 55.23,
"Art Style": 58.97,
"Counting": 70.83,
"Forensic Detection": 53.03,
"Functional Correspondence": 31.54,
"IQ Test": 24.67,
"Jigsaw": 63.33,
"Multi-view Reasoning": 56.39,
"Object Localization": 50.82,
"Relative Depth": 71.77,
"Relative Reflectance": 27.61,
"Semantic Correspondence": 44.6,
"Spatial Relation": 76.92,
"Visual Correspondence": 68.02,
"Visual Similarity": 74.81,
"acc_stderr": 0,
"acc": 55.23
}
}
}
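
The benchmark blocks above share a common shape: each reports a top-level "acc" (with "acc_stderr"), and several also carry the raw counts behind the percentage ("correct"/"num" for CMMMU, "final_score" as [correct, total] for Ocrlite and OcrliteZh). A minimal Python sketch for loading the file and cross-checking those counts against the reported accuracies, assuming the JSON has been saved locally under the filename shown in the header:

import json

# Assumption: the results file sits in the current directory under the name from the header.
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    report = json.load(f)

# Top-level accuracy per benchmark, as reported in each block's "acc" field.
for benchmark, block in report["results"].items():
    print(f"{benchmark:20s} acc = {block['acc']:.2f}")

# Cross-checks against the raw counts included in some blocks.
cmmmu = report["results"]["CMMMU"]["overall"]
assert round(100 * cmmmu["correct"] / cmmmu["num"], 2) == 43.44  # 391 / 900

ocr_correct, ocr_total = report["results"]["Ocrlite"]["final_score"]
assert round(100 * ocr_correct / ocr_total, 3) == 76.217  # 1253 / 1644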