{
    "config_general": {
        "model_name": "Phi-4-multimodal-instruct",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "CMMMU": {
            "艺术与设计": {
                "num": 88,
                "correct": 42,
                "accuracy": 47.73
            },
            "overall": {
                "num": 900,
                "correct": 308,
                "accuracy": 34.22
            },
            "商业": {
                "num": 126,
                "correct": 32,
                "accuracy": 25.4
            },
            "科学": {
                "num": 204,
                "correct": 63,
                "accuracy": 30.88
            },
            "健康与医学": {
                "num": 153,
                "correct": 58,
                "accuracy": 37.91
            },
            "人文社会科学": {
                "num": 85,
                "correct": 30,
                "accuracy": 35.29
            },
            "技术与工程": {
                "num": 244,
                "correct": 83,
                "accuracy": 34.02
            },
            "accuracy": 34.22,
            "acc_stderr": 0,
            "acc": 34.22
        },
        "MMMU": {
            "accuracy": 54.0,
            "subject_score": {
                "Accounting": 50.0,
                "Agriculture": 53.33,
                "Architecture": 40.0,
                "Art": 78.33,
                "Basic": 56.67,
                "Biology": 50.0,
                "Chemistry": 26.67,
                "Clinical": 70.0,
                "Computer": 50.0,
                "Design": 73.33,
                "Diagnostics": 40.0,
                "Economics": 56.67,
                "Electronics": 36.67,
                "Energy": 46.67,
                "Finance": 46.67,
                "Geography": 43.33,
                "History": 60.0,
                "Literature": 90.0,
                "Manage": 50.0,
                "Marketing": 56.67,
                "Materials": 33.33,
                "Math": 43.33,
                "Mechanical": 26.67,
                "Music": 33.33,
                "Pharmacy": 60.0,
                "Physics": 60.0,
                "Psychology": 66.67,
                "Public": 66.67,
                "Sociology": 76.67
            },
            "difficulty_score": {
                "Medium": 53.3,
                "Easy": 65.42,
                "Hard": 37.02
            },
            "acc_stderr": 0,
            "acc": 54.0
        },
        "MMMU_Pro_standard": {
            "accuracy": 39.36,
            "subject_score": {
                "History": 62.5,
                "Art": 52.83,
                "Design": 65.0,
                "Literature": 67.31,
                "Agriculture": 31.67,
                "Finance": 21.67,
                "Sociology": 53.7,
                "Accounting": 27.59,
                "Energy_and_Power": 27.59,
                "Pharmacy": 43.86,
                "Architecture_and_Engineering": 38.33,
                "Clinical_Medicine": 42.37,
                "Public_Health": 39.66,
                "Physics": 38.33,
                "Art_Theory": 70.91,
                "Electronics": 50.0,
                "Psychology": 35.0,
                "Biology": 37.29,
                "Manage": 32.0,
                "Economics": 42.37,
                "Mechanical_Engineering": 22.03,
                "Diagnostics_and_Laboratory_Medicine": 30.0,
                "Basic_Medical_Science": 40.38,
                "Computer_Science": 35.0,
                "Math": 23.33,
                "Music": 30.0,
                "Materials": 21.67,
                "Marketing": 40.68,
                "Chemistry": 33.33,
                "Geography": 32.69
            },
            "difficulty_score": {
                "Medium": 36.58,
                "Easy": 53.03,
                "Hard": 26.93
            },
            "acc_stderr": 0,
            "acc": 39.36
        },
        "MMMU_Pro_vision": {
            "accuracy": 26.59,
            "subject_score": {
                "History": 37.5,
                "Art": 39.62,
                "Design": 56.67,
                "Literature": 65.38,
                "Agriculture": 20.0,
                "Finance": 18.33,
                "Sociology": 29.63,
                "Accounting": 25.86,
                "Energy_and_Power": 20.69,
                "Pharmacy": 31.58,
                "Architecture_and_Engineering": 21.67,
                "Clinical_Medicine": 22.03,
                "Public_Health": 10.34,
                "Physics": 21.67,
                "Art_Theory": 32.73,
                "Electronics": 28.33,
                "Psychology": 20.0,
                "Biology": 23.73,
                "Manage": 26.0,
                "Economics": 16.95,
                "Mechanical_Engineering": 18.64,
                "Diagnostics_and_Laboratory_Medicine": 18.33,
                "Basic_Medical_Science": 32.69,
                "Computer_Science": 21.67,
                "Math": 18.33,
                "Music": 30.0,
                "Materials": 16.67,
                "Marketing": 28.81,
                "Chemistry": 26.67,
                "Geography": 25.0
            },
            "acc_stderr": 0,
            "acc": 26.59
        },
        "MmvetV2": {
            "accuracy": 36.25,
            "capability_scores": {
                "math": 28.235294117647058,
                "ocr": 32.592592592592595,
                "spat": 40.95238095238095,
                "rec": 25.454545454545453,
                "know": 66.66666666666666
            },
            "capability_detail_scores": {
                "math_ocr": 30.0,
                "spat_ocr_math": 30.0,
                "rec_spat_ocr_math": 0.0,
                "rec_spat": 60.0,
                "spat_ocr": 75.0,
                "rec_spat_ocr": 0.0,
                "spat_ocr_know": 100.0,
                "rec_ocr": 0.0,
                "rec_spat_know": 50.0
            },
            "acc_stderr": 0,
            "acc": 36.25
        },
        "MathVerse": {
            "Text Dominant": {
                "accuracy": 37.56,
                "correct": 296,
                "total": 788
            },
            "Total": {
                "accuracy": 30.63,
                "correct": 1207,
                "total": 3940
            },
            "Text Lite": {
                "accuracy": 33.5,
                "correct": 264,
                "total": 788
            },
            "Vision Intensive": {
                "accuracy": 31.35,
                "correct": 247,
                "total": 788
            },
            "Vision Dominant": {
                "accuracy": 30.84,
                "correct": 243,
                "total": 788
            },
            "Vision Only": {
                "accuracy": 19.92,
                "correct": 157,
                "total": 788
            },
            "accuracy": 30.63,
            "acc_stderr": 0,
            "acc": 30.63
        },
        "Ocrlite": {
            "final_score": [680, 1644],
            "accuracy": 41.363,
            "Key Information Extraction-Bookshelf": [6, 51, 0.118, {"Default": [6, 51, 0.118]}],
            "Scene Text-centric VQA-diet_constraints": [49, 90, 0.544, {"Default": [49, 90, 0.544]}],
            "Doc-oriented VQA-Control": [35, 189, 0.185, {"Default": [35, 189, 0.185]}],
            "Doc-oriented VQA": [78, 204, 0.382, {"Default": [78, 204, 0.382]}],
            "Scene Text-centric VQA-Fake_logo": [35, 119, 0.294, {"Default": [35, 119, 0.294]}],
            "Handwritten Mathematical Expression Recognition": [1, 100, 0.01, {"Default": [1, 100, 0.01]}],
            "Key Information Extraction": [131, 209, 0.627, {"Default": [131, 209, 0.627]}],
            "Scene Text-centric VQA-Control": [112, 200, 0.56, {"Default": [112, 200, 0.56]}],
            "Scene Text-centric VQA": [122, 282, 0.433, {"Default": [122, 282, 0.433]}],
            "Artistic Text Recognition": [31, 50, 0.62, {"Default": [31, 50, 0.62]}],
            "Irregular Text Recognition": [31, 50, 0.62, {"Default": [31, 50, 0.62]}],
            "Non-Semantic Text Recognition": [14, 50, 0.28, {"Default": [14, 50, 0.28]}],
            "Regular Text Recognition": [35, 50, 0.7, {"Default": [35, 50, 0.7]}],
            "acc_stderr": 0,
            "acc": 41.363
        },
        "OcrliteZh": {
            "final_score": [101, 234],
            "accuracy": 43.162,
            "Docvqa": [3, 10, 0.3, {"Default": [3, 10, 0.3]}],
            "Chartqa-human": [4, 10, 0.4, {"Default": [4, 10, 0.4]}],
            "Chartqa-au": [5, 10, 0.5, {"Default": [5, 10, 0.5]}],
            "infographic": [5, 10, 0.5, {"Default": [5, 10, 0.5]}],
            "Key Information Extraction": [33, 45, 0.733, {"Default": [33, 45, 0.733]}],
            "Scene Text-centric VQA": [14, 40, 0.35, {"Default": [14, 40, 0.35]}],
            "Artistic Text Recognition": [1, 11, 0.091, {"Default": [1, 11, 0.091]}],
            "IrRegular Text Recognition": [2, 11, 0.182, {"Default": [2, 11, 0.182]}],
            "Non-semantic Text Recognition": [6, 12, 0.5, {"Default": [6, 12, 0.5]}],
            "Regular Text Recognition": [6, 11, 0.545, {"Default": [6, 11, 0.545]}],
            "Handwriting_CN": [7, 20, 0.35, {"Default": [7, 20, 0.35]}],
            "Chinese Unlimited": [15, 44, 0.341, {"Default": [15, 44, 0.341]}],
            "acc_stderr": 0,
            "acc": 43.162
        },
        "CharXiv": {
            "descriptive": {
                "Overall Score": 56.25,
                "By Question": {
                    "Q1": 51.64,
                    "Q2": 73.48,
                    "Q3": 62.23,
                    "Q4": 77.82,
                    "Q5": 79.08,
                    "Q6": 64.26,
                    "Q7": 61.54,
                    "Q8": 58.04,
                    "Q9": 53.73,
                    "Q10": 56.85,
                    "Q11": 38.86,
                    "Q12": 55.49,
                    "Q13": 51.6,
                    "Q14": 48.23,
                    "Q15": 36.74,
                    "Q16": 66.67,
                    "Q17": 3.12,
                    "Q18": 70.45,
                    "Q19": 89.23
                },
                "By Category": {
                    "Information Extraction": 67.2,
                    "Enumeration": 48.59,
                    "Pattern Recognition": 58.08,
                    "Counting": 61.58,
                    "Compositionality": 3.12
                },
                "By Subplot": {
                    "1 Subplot": 64.18,
                    "2-4 Subplots": 54.03,
                    "5+ Subplots": 46.82
                },
                "By Subject": {
                    "Computer Science": 51.79,
                    "Economics": 57.07,
                    "Electrical Engineering and Systems Science": 65.13,
                    "Mathematics": 58.33,
                    "Physics": 49.41,
                    "Quantitative Biology": 51.19,
                    "Quantitative Finance": 59.91,
                    "Statistics": 57.96
                },
                "By Year": {
                    "2020": 55.97,
                    "2021": 54.89,
                    "2022": 55.02,
                    "2023": 59.17
                },
                "N_valid": 4000,
                "N_invalid": 0,
                "Question Type": "Descriptive"
            },
            "reasoning": {
                "Overall Score": 34.3,
                "By Answer Type": {
                    "Text-in-Chart": 32.5,
                    "Text-in-General": 47.47,
                    "Number-in-Chart": 39.66,
                    "Number-in-General": 26.64
                },
                "By Source": {
                    "GPT-Sourced": 38.04,
                    "GPT-Inspired": 28.7,
                    "Completely Human": 35.17
                },
                "By Subject": {
                    "Computer Science": 32.54,
                    "Economics": 34.06,
                    "Electrical Engineering and Systems Science": 31.93,
                    "Mathematics": 39.26,
                    "Physics": 39.37,
                    "Quantitative Biology": 30.95,
                    "Quantitative Finance": 28.45,
                    "Statistics": 37.17
                },
                "By Year": {
                    "2020": 30.36,
                    "2021": 34.48,
                    "2022": 38.11,
                    "2023": 34.27
                },
                "By Subplot": {
                    "1 Subplot": 38.34,
                    "2-4 Subplots": 32.8,
                    "5+ Subplots": 30.08
                },
                "N_valid": 1000,
                "N_invalid": 0,
                "Question Type": "Reasoning"
            },
            "accuracy": 45.27,
            "acc_stderr": 0,
            "acc": 45.27
        },
        "MathVision": {
            "accuracy": 15.95,
            "acc_stderr": 0,
            "acc": 15.95
        },
        "CII-Bench": {
            "accuracy": 37.65,
            "domain_score": {
                "Life": 33.77,
                "Art": 41.91,
                "CTC": 31.85,
                "Society": 37.84,
                "Env.": 53.7,
                "Politics": 45.83
            },
            "emotion_score": {
                "Neutral": 38.35,
                "Negative": 40.0,
                "Positive": 34.19
            },
            "acc_stderr": 0,
            "acc": 37.65
        },
        "Blink": {
            "accuracy": 61.39,
            "Art Style": 87.18,
            "Counting": 59.17,
            "Forensic Detection": 90.15,
            "Functional Correspondence": 27.69,
            "IQ Test": 25.33,
            "Jigsaw": 67.33,
            "Multi-view Reasoning": 77.44,
            "Object Localization": 52.46,
            "Relative Depth": 70.97,
            "Relative Reflectance": 31.34,
            "Semantic Correspondence": 51.8,
            "Spatial Relation": 72.73,
            "Visual Correspondence": 67.44,
            "Visual Similarity": 82.22,
            "acc_stderr": 0,
            "acc": 61.39
        }
    }
}