{
  "config_general": {
    "model_name": "Qwen2.5-VL-32B-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "艺术与设计": {
        "num": 88,
        "correct": 65,
        "accuracy": 73.86
      },
      "overall": {
        "num": 900,
        "correct": 443,
        "accuracy": 49.22
      },
      "商业": {
        "num": 126,
        "correct": 39,
        "accuracy": 30.95
      },
      "科学": {
        "num": 204,
        "correct": 85,
        "accuracy": 41.67
      },
      "健康与医学": {
        "num": 153,
        "correct": 82,
        "accuracy": 53.59
      },
      "人文社会科学": {
        "num": 85,
        "correct": 55,
        "accuracy": 64.71
      },
      "技术与工程": {
        "num": 244,
        "correct": 117,
        "accuracy": 47.95
      },
      "accuracy": 49.22,
      "acc_stderr": 0,
      "acc": 49.22
    },
    "MMMU": {
      "accuracy": 61.0,
      "subject_score": {
        "Accounting": 53.33,
        "Agriculture": 56.67,
        "Architecture": 33.33,
        "Art": 75.0,
        "Basic": 66.67,
        "Biology": 63.33,
        "Chemistry": 33.33,
        "Clinical": 70.0,
        "Computer": 63.33,
        "Design": 83.33,
        "Diagnostics": 46.67,
        "Economics": 63.33,
        "Electronics": 40.0,
        "Energy": 50.0,
        "Finance": 36.67,
        "Geography": 66.67,
        "History": 80.0,
        "Literature": 83.33,
        "Manage": 53.33,
        "Marketing": 76.67,
        "Materials": 50.0,
        "Math": 50.0,
        "Mechanical": 43.33,
        "Music": 53.33,
        "Pharmacy": 73.33,
        "Physics": 73.33,
        "Psychology": 73.33,
        "Public": 70.0,
        "Sociology": 73.33
      },
      "difficulty_score": {
        "Hard": 44.2,
        "Medium": 60.14,
        "Easy": 72.54
      },
      "acc_stderr": 0,
      "acc": 61.0
    },
    "MMMU_Pro_standard": {
      "accuracy": 44.34,
      "subject_score": {
        "Literature": 69.23,
        "Finance": 36.67,
        "Design": 68.33,
        "History": 62.5,
        "Sociology": 50.0,
        "Art": 60.38,
        "Energy_and_Power": 20.69,
        "Agriculture": 23.33,
        "Accounting": 31.03,
        "Clinical_Medicine": 49.15,
        "Pharmacy": 45.61,
        "Architecture_and_Engineering": 40.0,
        "Public_Health": 43.1,
        "Physics": 35.0,
        "Electronics": 60.0,
        "Art_Theory": 67.27,
        "Psychology": 51.67,
        "Manage": 42.0,
        "Biology": 42.37,
        "Economics": 47.46,
        "Diagnostics_and_Laboratory_Medicine": 36.67,
        "Mechanical_Engineering": 44.07,
        "Basic_Medical_Science": 44.23,
        "Computer_Science": 48.33,
        "Math": 36.67,
        "Music": 33.33,
        "Materials": 26.67,
        "Marketing": 37.29,
        "Chemistry": 36.67,
        "Geography": 48.08
      },
      "difficulty_score": {
        "Easy": 54.92,
        "Hard": 35.66,
        "Medium": 41.7
      },
      "acc_stderr": 0,
      "acc": 44.34
    },
    "MMMU_Pro_vision": {
      "accuracy": 35.55,
      "subject_score": {
        "History": 48.21,
        "Sociology": 44.44,
        "Design": 48.33,
        "Accounting": 24.14,
        "Energy_and_Power": 13.79,
        "Literature": 67.31,
        "Agriculture": 20.0,
        "Art": 45.28,
        "Pharmacy": 50.88,
        "Finance": 33.33,
        "Clinical_Medicine": 40.68,
        "Architecture_and_Engineering": 23.33,
        "Physics": 38.33,
        "Art_Theory": 49.09,
        "Electronics": 40.0,
        "Psychology": 31.67,
        "Manage": 28.0,
        "Biology": 28.81,
        "Economics": 37.29,
        "Diagnostics_and_Laboratory_Medicine": 28.33,
        "Mechanical_Engineering": 22.03,
        "Basic_Medical_Science": 34.62,
        "Computer_Science": 46.67,
        "Math": 21.67,
        "Public_Health": 43.1,
        "Materials": 30.0,
        "Music": 20.0,
        "Geography": 36.54,
        "Marketing": 35.59,
        "Chemistry": 41.67
      },
      "acc_stderr": 0,
      "acc": 35.55
    },
    "MmvetV2": {
      "accuracy": 70.7157,
      "capability_scores": {
        "ocr": 81.00961538461544,
        "math": 84.11764705882352,
        "spat": 69.59390862944163,
        "rec": 66.48058252427191,
        "know": 64.35897435897436,
        "gen": 68.87272727272735,
        "seq": 65.35714285714286
      },
      "capability_detail_scores": {
        "ocr_math": 88.18181818181817,
        "ocr_math_spat": 89.33333333333333,
        "rec_ocr_math_spat": 50.0,
        "rec_spat": 63.21428571428571,
        "ocr_spat": 84.61538461538461,
        "rec_ocr_spat": 50.0,
        "know_ocr_spat": 100.0,
        "rec_ocr": 100.0,
        "know_rec_spat": 50.0,
        "ocr": 89.68749999999999,
        "rec": 69.15254237288137,
        "know_rec": 64.61538461538461,
        "know_rec_gen": 61.699999999999974,
        "know_rec_ocr_gen": 84.6153846153846,
        "rec_ocr_gen_spat": 81.62790697674417,
        "ocr_gen_spat": 95.0,
        "gen_spat_math_ocr_seq": 100.0,
        "rec_spat_math_ocr_seq": 100.0,
        "rec_gen_spat": 54.54545454545454,
        "ocr_math_gen_spat": 50.0,
        "rec_seq_spat": 65.71428571428571,
        "rec_seq_ocr_spat": 16.666666666666664,
        "know_rec_gen_spat": 40.0,
        "rec_gen": 70.29411764705883,
        "know_rec_ocr_spat": 47.5,
        "rec_gen_spat_know_ocr": 90.0,
        "rec_ocr_math": 100.0,
        "rec_ocr_gen": 96.0,
        "rec_seq_ocr_gen": 72.85714285714286,
        "ocr_gen": 80.76923076923077,
        "rec_seq_gen": 64.28571428571429,
        "rec_seq": 58.333333333333336,
        "rec_seq_gen_spat": 73.75,
        "know_rec_seq": 100.0,
        "know_rec_seq_gen": 70.0,
        "rec_gen_spat_ocr_seq": 20.0,
        "rec_gen_know_ocr_seq": 100.0,
        "know_rec_math": 50.0,
        "rec_seq_ocr": 100.0
      },
      "acc_stderr": 0,
      "acc": 70.7157
    },
    "MathVerse": {
      "Text Lite": {
        "accuracy": 54.95,
        "correct": 433,
        "total": 788
      },
      "Total": {
        "accuracy": 53.27,
        "correct": 2099,
        "total": 3940
      },
      "Text Dominant": {
        "accuracy": 62.44,
        "correct": 492,
        "total": 788
      },
      "Vision Intensive": {
        "accuracy": 49.87,
        "correct": 393,
        "total": 788
      },
      "Vision Dominant": {
        "accuracy": 50.13,
        "correct": 395,
        "total": 788
      },
      "Vision Only": {
        "accuracy": 48.98,
        "correct": 386,
        "total": 788
      },
      "accuracy": 53.27,
      "acc_stderr": 0,
      "acc": 53.27
    },
    "Ocrlite": {
      "final_score": [
        1275,
        1644
      ],
      "accuracy": 77.555,
      "Key Information Extraction-Bookshelf": [
        32,
        51,
        0.627,
        {
          "Default": [
            32,
            51,
            0.627
          ]
        }
      ],
      "Scene Text-centric VQA-diet_constraints": [
        75,
        90,
        0.833,
        {
          "Default": [
            75,
            90,
            0.833
          ]
        }
      ],
      "Doc-oriented VQA-Control": [
        152,
        189,
        0.804,
        {
          "Default": [
            152,
            189,
            0.804
          ]
        }
      ],
      "Doc-oriented VQA": [
        176,
        204,
        0.863,
        {
          "Default": [
            176,
            204,
            0.863
          ]
        }
      ],
      "Scene Text-centric VQA-Fake_logo": [
        61,
        119,
        0.513,
        {
          "Default": [
            61,
            119,
            0.513
          ]
        }
      ],
      "Handwritten Mathematical Expression Recognition": [
        1,
        100,
        0.01,
        {
          "Default": [
            1,
            100,
            0.01
          ]
        }
      ],
      "Key Information Extraction": [
        194,
        209,
        0.928,
        {
          "Default": [
            194,
            209,
            0.928
          ]
        }
      ],
      "Scene Text-centric VQA-Control": [
        170,
        200,
        0.85,
        {
          "Default": [
            170,
            200,
            0.85
          ]
        }
      ],
      "Scene Text-centric VQA": [
        249,
        282,
        0.883,
        {
          "Default": [
            249,
            282,
            0.883
          ]
        }
      ],
      "Artistic Text Recognition": [
        41,
        50,
        0.82,
        {
          "Default": [
            41,
            50,
            0.82
          ]
        }
      ],
      "Irregular Text Recognition": [
        42,
        50,
        0.84,
        {
          "Default": [
            42,
            50,
            0.84
          ]
        }
      ],
      "Non-Semantic Text Recognition": [
        34,
        50,
        0.68,
        {
          "Default": [
            34,
            50,
            0.68
          ]
        }
      ],
      "Regular Text Recognition": [
        48,
        50,
        0.96,
        {
          "Default": [
            48,
            50,
            0.96
          ]
        }
      ],
      "acc_stderr": 0,
      "acc": 77.555
    },
    "OcrliteZh": {
      "final_score": [
        168,
        234
      ],
      "accuracy": 71.795,
      "Docvqa": [
        8,
        10,
        0.8,
        {
          "Default": [
            8,
            10,
            0.8
          ]
        }
      ],
      "Chartqa-human": [
        8,
        10,
        0.8,
        {
          "Default": [
            8,
            10,
            0.8
          ]
        }
      ],
      "Chartqa-au": [
        8,
        10,
        0.8,
        {
          "Default": [
            8,
            10,
            0.8
          ]
        }
      ],
      "infographic": [
        8,
        10,
        0.8,
        {
          "Default": [
            8,
            10,
            0.8
          ]
        }
      ],
      "Key Information Extraction": [
        40,
        45,
        0.889,
        {
          "Default": [
            40,
            45,
            0.889
          ]
        }
      ],
      "Scene Text-centric VQA": [
        28,
        40,
        0.7,
        {
          "Default": [
            28,
            40,
            0.7
          ]
        }
      ],
      "Artistic Text Recognition": [
        4,
        11,
        0.364,
        {
          "Default": [
            4,
            11,
            0.364
          ]
        }
      ],
      "IrRegular Text Recognition": [
        7,
        11,
        0.636,
        {
          "Default": [
            7,
            11,
            0.636
          ]
        }
      ],
      "Non-semantic Text Recognition": [
        11,
        12,
        0.917,
        {
          "Default": [
            11,
            12,
            0.917
          ]
        }
      ],
      "Regular Text Recognition": [
        10,
        11,
        0.909,
        {
          "Default": [
            10,
            11,
            0.909
          ]
        }
      ],
      "Handwriting_CN": [
        13,
        20,
        0.65,
        {
          "Default": [
            13,
            20,
            0.65
          ]
        }
      ],
      "Chinese Unlimited": [
        23,
        44,
        0.523,
        {
          "Default": [
            23,
            44,
            0.523
          ]
        }
      ],
      "acc_stderr": 0,
      "acc": 71.795
    },
    "CharXiv": {
      "descriptive": {
        "Overall Score": 81.6,
        "By Question": {
          "Q1": 87.3,
          "Q2": 85.22,
          "Q3": 71.67,
          "Q4": 89.11,
          "Q5": 89.12,
          "Q6": 83.13,
          "Q7": 80.34,
          "Q8": 91.96,
          "Q9": 76.62,
          "Q10": 78.08,
          "Q11": 50.29,
          "Q12": 75.27,
          "Q13": 70.78,
          "Q14": 87.94,
          "Q15": 94.25,
          "Q16": 75.0,
          "Q17": 65.18,
          "Q18": 89.88,
          "Q19": 90.77
        },
        "By Category": {
          "Information Extraction": 83.81,
          "Enumeration": 85.39,
          "Pattern Recognition": 73.58,
          "Counting": 78.88,
          "Compositionality": 65.18
        },
        "By Subplot": {
          "1 Subplot": 86.01,
          "2-4 Subplots": 81.94,
          "5+ Subplots": 73.83
        },
        "By Subject": {
          "Computer Science": 82.14,
          "Economics": 83.7,
          "Electrical Engineering and Systems Science": 85.92,
          "Mathematics": 83.7,
          "Physics": 76.77,
          "Quantitative Biology": 76.19,
          "Quantitative Finance": 80.39,
          "Statistics": 84.07
        },
        "By Year": {
          "2020": 81.98,
          "2021": 79.6,
          "2022": 83.61,
          "2023": 81.35
        },
        "N_valid": 4000,
        "N_invalid": 5,
        "Question Type": "Descriptive"
      },
      "reasoning": {
        "Overall Score": 42.7,
        "By Answer Type": {
          "Text-in-Chart": 45.45,
          "Text-in-General": 43.43,
          "Number-in-Chart": 44.4,
          "Number-in-General": 35.37
        },
        "By Source": {
          "GPT-Sourced": 50.0,
          "GPT-Inspired": 41.67,
          "Completely Human": 40.83
        },
        "By Subject": {
          "Computer Science": 42.06,
          "Economics": 43.48,
          "Electrical Engineering and Systems Science": 42.86,
          "Mathematics": 40.0,
          "Physics": 51.18,
          "Quantitative Biology": 42.06,
          "Quantitative Finance": 37.93,
          "Statistics": 41.59
        },
        "By Year": {
          "2020": 40.08,
          "2021": 41.38,
          "2022": 36.48,
          "2023": 52.82
        },
        "By Subplot": {
          "1 Subplot": 45.6,
          "2-4 Subplots": 42.33,
          "5+ Subplots": 38.56
        },
        "N_valid": 1000,
        "N_invalid": 1,
        "Question Type": "Reasoning"
      },
      "accuracy": 62.15,
      "acc_stderr": 0,
      "acc": 62.15
    },
    "MathVision": {
      "accuracy": 31.18,
      "acc_stderr": 0,
      "acc": 31.18
    },
    "CII-Bench": {
      "accuracy": 63.01,
      "domain_score": {
        "Art": 62.5,
        "Life": 60.17,
        "Env.": 79.63,
        "Society": 65.95,
        "CTC": 58.52,
        "Politics": 58.33
      },
      "emotion_score": {
        "Negative": 64.91,
        "Neutral": 60.15,
        "Positive": 64.1
      },
      "acc_stderr": 0,
      "acc": 63.01
    },
    "Blink": {
      "accuracy": 58.97,
      "Art Style": 76.07,
      "Counting": 65.0,
      "Forensic Detection": 58.33,
      "Functional Correspondence": 29.23,
      "IQ Test": 30.67,
      "Jigsaw": 69.33,
      "Multi-view Reasoning": 48.87,
      "Object Localization": 56.56,
      "Relative Depth": 74.19,
      "Relative Reflectance": 28.36,
      "Semantic Correspondence": 51.8,
      "Spatial Relation": 86.01,
      "Visual Correspondence": 72.09,
      "Visual Similarity": 78.52,
      "acc_stderr": 0,
      "acc": 58.97
    }
  }
}