{
  "config_general": {
    "model_name": "Qwen-VL-Max",
    "model_dtype": "float16",
    "model_size": 0
  },
"results": { |
|
"CMMMU": { |
|
"reject_info": { |
|
"reject_rate": 0.78, |
|
"reject_number": 7, |
|
"total_question": 900 |
|
}, |
|
"accuracy": 49.94, |
|
"acc_stderr": 0, |
|
"acc": 49.94 |
|
}, |
|
"MMMU": { |
|
"reject_info": { |
|
"reject_rate": 0.78, |
|
"reject_number": 7, |
|
"total_question": 900 |
|
}, |
|
"accuracy": 56.89, |
|
"acc_stderr": 0, |
|
"acc": 56.89 |
|
}, |
|
"MMMU_Pro_standard": { |
|
"reject_info": { |
|
"reject_rate": 0.29, |
|
"reject_number": 5, |
|
"total_question": 1730 |
|
}, |
|
"accuracy": 39.25, |
|
"acc_stderr": 0, |
|
"acc": 39.25 |
|
}, |
|
"MMMU_Pro_vision": { |
|
"accuracy": 41.16, |
|
"subject_score": { |
|
"History": 58.93, |
|
"Art": 50.94, |
|
"Design": 61.67, |
|
"Literature": 61.54, |
|
"Agriculture": 20.0, |
|
"Finance": 50.0, |
|
"Sociology": 46.3, |
|
"Accounting": 44.83, |
|
"Energy_and_Power": 18.97, |
|
"Pharmacy": 47.37, |
|
"Architecture_and_Engineering": 21.67, |
|
"Clinical_Medicine": 33.9, |
|
"Public_Health": 53.45, |
|
"Physics": 31.67, |
|
"Art_Theory": 63.64, |
|
"Electronics": 43.33, |
|
"Psychology": 46.67, |
|
"Biology": 44.07, |
|
"Manage": 26.0, |
|
"Economics": 52.54, |
|
"Mechanical_Engineering": 42.37, |
|
"Diagnostics_and_Laboratory_Medicine": 30.0, |
|
"Basic_Medical_Science": 48.08, |
|
"Computer_Science": 35.0, |
|
"Math": 36.67, |
|
"Music": 38.33, |
|
"Materials": 26.67, |
|
"Marketing": 42.37, |
|
"Chemistry": 33.33, |
|
"Geography": 28.85 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 41.16 |
|
}, |
|
"MmvetV2": { |
|
"reject_info": { |
|
"reject_rate": 0.97, |
|
"reject_number": 5, |
|
"total_question": 517 |
|
}, |
|
"accuracy": 63.7695, |
|
"capability_scores": { |
|
"ocr": 69.03846153846155, |
|
"math": 64.99999999999999, |
|
"spat": 58.25641025641023, |
|
"rec": 60.491400491400526, |
|
"know": 59.271523178807925, |
|
"gen": 63.12500000000001, |
|
"seq": 58.21428571428571 |
|
}, |
|
"capability_detail_scores": { |
|
"ocr_math": 80.0, |
|
"ocr_spat_math": 62.66666666666667, |
|
"ocr_rec_spat_math": 25.0, |
|
"rec_spat": 54.642857142857146, |
|
"ocr_spat": 74.23076923076923, |
|
"ocr_rec_spat": 30.83333333333333, |
|
"ocr_know_spat": 95.0, |
|
"ocr_rec": 70.0, |
|
"rec_know_spat": 43.75, |
|
"ocr": 84.6875, |
|
"rec": 70.5084745762712, |
|
"rec_know": 48.46153846153846, |
|
"rec_know_gen": 61.03092783505152, |
|
"ocr_rec_know_gen": 73.84615384615387, |
|
"ocr_rec_spat_gen": 69.30232558139537, |
|
"ocr_spat_gen": 80.0, |
|
"ocr_seq_spat_math_gen": 100.0, |
|
"ocr_seq_spat_rec_math": 0.0, |
|
"rec_spat_gen": 52.727272727272734, |
|
"ocr_spat_math_gen": 40.0, |
|
"rec_spat_seq": 48.57142857142857, |
|
"ocr_rec_spat_seq": 43.333333333333336, |
|
"rec_know_spat_gen": 46.66666666666667, |
|
"rec_gen": 63.82352941176471, |
|
"ocr_rec_spat_know": 12.5, |
|
"ocr_spat_know": 95.0, |
|
"ocr_spat_rec_know_gen": 60.0, |
|
"ocr_rec_math": 100.0, |
|
"ocr_rec_gen": 78.0, |
|
"ocr_rec_gen_seq": 65.71428571428572, |
|
"ocr_gen": 68.46153846153847, |
|
"rec_gen_seq": 56.42857142857143, |
|
"rec_seq": 75.0, |
|
"rec_spat_gen_seq": 56.25, |
|
"rec_know_seq": 0.0, |
|
"rec_know_gen_seq": 50.0, |
|
"ocr_seq_spat_rec_gen": 46.666666666666664, |
|
"ocr_seq_rec_know_gen": 100.0, |
|
"rec_know_math": 50.0, |
|
"ocr_rec_seq": 100.0, |
|
"rec_spat_know": 43.75 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 63.7695 |
|
}, |
|
"MathVerse": { |
|
"reject_info": { |
|
"reject_rate": 0.03, |
|
"reject_number": 1, |
|
"total_question": 3940 |
|
}, |
|
"Text Dominant": { |
|
"accuracy": 46.95, |
|
"correct": 370, |
|
"total": 788 |
|
}, |
|
"Total": { |
|
"accuracy": 40.14, |
|
"correct": 1581, |
|
"total": 3939 |
|
}, |
|
"Text Lite": { |
|
"accuracy": 39.97, |
|
"correct": 315, |
|
"total": 788 |
|
}, |
|
"Vision Intensive": { |
|
"accuracy": 39.47, |
|
"correct": 311, |
|
"total": 788 |
|
}, |
|
"Vision Dominant": { |
|
"accuracy": 36.55, |
|
"correct": 288, |
|
"total": 788 |
|
}, |
|
"Vision Only": { |
|
"accuracy": 37.74, |
|
"correct": 297, |
|
"total": 787 |
|
}, |
|
"accuracy": 40.14, |
|
"acc_stderr": 0, |
|
"acc": 40.14 |
|
}, |
|
"Ocrlite": { |
|
"reject_info": { |
|
"reject_rate": 0.18, |
|
"reject_number": 3, |
|
"total_question": 1645 |
|
}, |
|
"final_score": [ |
|
1266, |
|
1642 |
|
], |
|
"accuracy": 77.101, |
|
"Key Information Extraction-Bookshelf": [ |
|
33, |
|
52 |
|
], |
|
"Scene Text-centric VQA-diet_constraints": [ |
|
67, |
|
90 |
|
], |
|
"Doc-oriented VQA-Control": [ |
|
137, |
|
188 |
|
], |
|
"Doc-oriented VQA": [ |
|
153, |
|
204 |
|
], |
|
"Scene Text-centric VQA-Fake_logo": [ |
|
62, |
|
119 |
|
], |
|
"Handwritten Mathematical Expression Recognition": [ |
|
37, |
|
100 |
|
], |
|
"Key Information Extraction": [ |
|
196, |
|
209 |
|
], |
|
"Scene Text-centric VQA-Control": [ |
|
168, |
|
199 |
|
], |
|
"Scene Text-centric VQA": [ |
|
239, |
|
281 |
|
], |
|
"Artistic Text Recognition": [ |
|
44, |
|
50 |
|
], |
|
"Irregular Text Recognition": [ |
|
45, |
|
50 |
|
], |
|
"Non-Semantic Text Recognition": [ |
|
36, |
|
50 |
|
], |
|
"Regular Text Recognition": [ |
|
49, |
|
50 |
|
], |
|
"acc_stderr": 0, |
|
"acc": 77.101 |
|
}, |
|
"OcrliteZh": { |
|
"reject_info": { |
|
"reject_rate": 0.43, |
|
"reject_number": 1, |
|
"total_question": 234 |
|
}, |
|
"final_score": [ |
|
148, |
|
233 |
|
], |
|
"accuracy": 63.519, |
|
"Docvqa": [ |
|
7, |
|
10 |
|
], |
|
"Chartqa-human": [ |
|
5, |
|
10 |
|
], |
|
"Chartqa-au": [ |
|
6, |
|
10 |
|
], |
|
"infographic": [ |
|
5, |
|
9 |
|
], |
|
"Key Information Extraction": [ |
|
34, |
|
45 |
|
], |
|
"Scene Text-centric VQA": [ |
|
28, |
|
40 |
|
], |
|
"Artistic Text Recognition": [ |
|
6, |
|
11 |
|
], |
|
"IrRegular Text Recognition": [ |
|
6, |
|
11 |
|
], |
|
"Non-semantic Text Recognition": [ |
|
10, |
|
12 |
|
], |
|
"Regular Text Recognition": [ |
|
10, |
|
11 |
|
], |
|
"Handwriting_CN": [ |
|
13, |
|
20 |
|
], |
|
"Chinese Unlimited": [ |
|
18, |
|
44 |
|
], |
|
"acc_stderr": 0, |
|
"acc": 63.519 |
|
}, |
|
"CharXiv": { |
|
"reject_info": { |
|
"reject_rate": 0.04, |
|
"reject_number": 2, |
|
"total_question": 5000 |
|
}, |
|
"descriptive": { |
|
"Overall Score": 76.49, |
|
"By Question": { |
|
"Q1": 79.92, |
|
"Q2": 76.09, |
|
"Q3": 72.1, |
|
"Q4": 81.71, |
|
"Q5": 80.33, |
|
"Q6": 73.09, |
|
"Q7": 72.65, |
|
"Q8": 78.57, |
|
"Q9": 75.62, |
|
"Q10": 81.51, |
|
"Q11": 53.71, |
|
"Q12": 80.22, |
|
"Q13": 77.17, |
|
"Q14": 83.33, |
|
"Q15": 79.23, |
|
"Q16": 75.0, |
|
"Q17": 58.48, |
|
"Q18": 86.59, |
|
"Q19": 87.69 |
|
}, |
|
"By Category": { |
|
"Information Extraction": 76.63, |
|
"Enumeration": 79.1, |
|
"Pattern Recognition": 73.09, |
|
"Counting": 81.93, |
|
"Compositionality": 58.48 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 82.71, |
|
"2-4 Subplots": 76.19, |
|
"5+ Subplots": 66.81 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 77.18, |
|
"Economics": 78.26, |
|
"Electrical Engineering and Systems Science": 79.83, |
|
"Mathematics": 78.85, |
|
"Physics": 71.46, |
|
"Quantitative Biology": 72.42, |
|
"Quantitative Finance": 76.94, |
|
"Statistics": 76.99 |
|
}, |
|
"By Year": { |
|
"2020": 74.49, |
|
"2021": 75.77, |
|
"2022": 78.97, |
|
"2023": 76.81 |
|
}, |
|
"N_valid": 3999, |
|
"N_invalid": 0, |
|
"Question Type": "Descriptive" |
|
}, |
|
"reasoning": { |
|
"Overall Score": 38.64, |
|
"By Answer Type": { |
|
"Text-in-Chart": 43.28, |
|
"Text-in-General": 41.41, |
|
"Number-in-Chart": 36.64, |
|
"Number-in-General": 30.57 |
|
}, |
|
"By Source": { |
|
"GPT-Sourced": 49.18, |
|
"GPT-Inspired": 31.94, |
|
"Completely Human": 37.83 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 39.68, |
|
"Economics": 42.03, |
|
"Electrical Engineering and Systems Science": 41.18, |
|
"Mathematics": 33.33, |
|
"Physics": 46.03, |
|
"Quantitative Biology": 35.71, |
|
"Quantitative Finance": 33.62, |
|
"Statistics": 37.17 |
|
}, |
|
"By Year": { |
|
"2020": 37.25, |
|
"2021": 45.0, |
|
"2022": 35.66, |
|
"2023": 36.29 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 41.97, |
|
"2-4 Subplots": 38.62, |
|
"5+ Subplots": 33.19 |
|
}, |
|
"N_valid": 999, |
|
"N_invalid": 4, |
|
"Question Type": "Reasoning" |
|
}, |
|
"accuracy": 57.56, |
|
"acc_stderr": 0, |
|
"acc": 57.56 |
|
}, |
|
"MathVision": { |
|
"reject_info": { |
|
"reject_rate": 0.03, |
|
"reject_number": 1, |
|
"total_question": 3040 |
|
}, |
|
"accuracy": 26.39, |
|
"acc_stderr": 0, |
|
"acc": 26.39 |
|
}, |
|
"CII-Bench": { |
|
"accuracy": 58.76, |
|
"domain_score": { |
|
"Life": 56.39, |
|
"Art": 61.03, |
|
"CTC": 51.11, |
|
"Society": 61.41, |
|
"Env.": 70.37, |
|
"Politics": 65.22 |
|
}, |
|
"emotion_score": { |
|
"Neutral": 59.09, |
|
"Negative": 59.39, |
|
"Positive": 57.69 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 58.76 |
|
}, |
|
"Blink": { |
|
"reject_info": { |
|
"reject_rate": 0.11, |
|
"reject_number": 2, |
|
"total_question": 1901 |
|
}, |
|
"accuracy": 56.4, |
|
"Art Style": 68.1, |
|
"Counting": 63.33, |
|
"Forensic Detection": 59.85, |
|
"Functional Correspondence": 47.69, |
|
"IQ Test": 23.33, |
|
"Jigsaw": 52.35, |
|
"Multi-view Reasoning": 52.63, |
|
"Object Localization": 55.74, |
|
"Relative Depth": 65.32, |
|
"Relative Reflectance": 36.57, |
|
"Semantic Correspondence": 42.45, |
|
"Spatial Relation": 72.03, |
|
"Visual Correspondence": 72.09, |
|
"Visual Similarity": 80.0, |
|
"acc_stderr": 0, |
|
"acc": 56.4 |
|
} |
|
} |
|
} |