{
    "config_general": {
        "model_name": "InternVL2_5-2B",
        "model_dtype": "float16",
        "model_size": 0
    },
"results": { |
|
"CMMMU": { |
|
"艺术与设计": { |
|
"num": 88, |
|
"correct": 45, |
|
"accuracy": 51.14 |
|
}, |
|
"overall": { |
|
"num": 900, |
|
"correct": 317, |
|
"accuracy": 35.22 |
|
}, |
|
"商业": { |
|
"num": 126, |
|
"correct": 33, |
|
"accuracy": 26.19 |
|
}, |
|
"科学": { |
|
"num": 204, |
|
"correct": 59, |
|
"accuracy": 28.92 |
|
}, |
|
"健康与医学": { |
|
"num": 153, |
|
"correct": 62, |
|
"accuracy": 40.52 |
|
}, |
|
"人文社会科学": { |
|
"num": 85, |
|
"correct": 34, |
|
"accuracy": 40.0 |
|
}, |
|
"技术与工程": { |
|
"num": 244, |
|
"correct": 84, |
|
"accuracy": 34.43 |
|
}, |
|
"accuracy": 35.22, |
|
"acc_stderr": 0, |
|
"acc": 35.22 |
|
}, |
|
"MMMU": { |
|
"accuracy": 41.78, |
|
"subject_score": { |
|
"Accounting": 26.67, |
|
"Agriculture": 30.0, |
|
"Architecture": 26.67, |
|
"Art": 63.33, |
|
"Basic": 60.0, |
|
"Biology": 40.0, |
|
"Chemistry": 26.67, |
|
"Clinical": 43.33, |
|
"Computer": 46.67, |
|
"Design": 73.33, |
|
"Diagnostics": 36.67, |
|
"Economics": 30.0, |
|
"Electronics": 26.67, |
|
"Energy": 33.33, |
|
"Finance": 10.0, |
|
"Geography": 36.67, |
|
"History": 60.0, |
|
"Literature": 76.67, |
|
"Manage": 33.33, |
|
"Marketing": 43.33, |
|
"Materials": 40.0, |
|
"Math": 43.33, |
|
"Mechanical": 23.33, |
|
"Music": 33.33, |
|
"Pharmacy": 50.0, |
|
"Physics": 26.67, |
|
"Psychology": 46.67, |
|
"Public": 53.33, |
|
"Sociology": 50.0 |
|
}, |
|
"difficulty_score": { |
|
"Medium": 39.39, |
|
"Easy": 52.54, |
|
"Hard": 29.83 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 41.78 |
|
}, |
|
"MMMU_Pro_standard": { |
|
"accuracy": 25.55, |
|
"subject_score": { |
|
"History": 35.71, |
|
"Art": 47.17, |
|
"Design": 53.33, |
|
"Literature": 61.54, |
|
"Agriculture": 21.67, |
|
"Finance": 11.67, |
|
"Sociology": 35.19, |
|
"Accounting": 12.07, |
|
"Energy_and_Power": 10.34, |
|
"Pharmacy": 29.82, |
|
"Architecture_and_Engineering": 13.33, |
|
"Clinical_Medicine": 23.73, |
|
"Public_Health": 13.79, |
|
"Physics": 16.67, |
|
"Art_Theory": 52.73, |
|
"Electronics": 26.67, |
|
"Psychology": 16.67, |
|
"Biology": 30.51, |
|
"Manage": 26.0, |
|
"Economics": 15.25, |
|
"Mechanical_Engineering": 16.95, |
|
"Diagnostics_and_Laboratory_Medicine": 21.67, |
|
"Basic_Medical_Science": 34.62, |
|
"Computer_Science": 15.0, |
|
"Math": 23.33, |
|
"Music": 23.33, |
|
"Materials": 13.33, |
|
"Marketing": 22.03, |
|
"Chemistry": 25.0, |
|
"Geography": 28.85 |
|
}, |
|
"difficulty_score": { |
|
"Medium": 22.1, |
|
"Easy": 35.8, |
|
"Hard": 18.95 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 25.55 |
|
}, |
|
"MMMU_Pro_vision": { |
|
"accuracy": 15.43, |
|
"subject_score": { |
|
"History": 10.71, |
|
"Art": 11.32, |
|
"Design": 13.33, |
|
"Literature": 42.31, |
|
"Agriculture": 13.33, |
|
"Finance": 10.0, |
|
"Sociology": 16.67, |
|
"Accounting": 15.52, |
|
"Energy_and_Power": 17.24, |
|
"Pharmacy": 26.32, |
|
"Architecture_and_Engineering": 16.67, |
|
"Clinical_Medicine": 6.78, |
|
"Public_Health": 12.07, |
|
"Physics": 16.67, |
|
"Art_Theory": 12.73, |
|
"Electronics": 18.33, |
|
"Psychology": 16.67, |
|
"Biology": 11.86, |
|
"Manage": 28.0, |
|
"Economics": 10.17, |
|
"Mechanical_Engineering": 16.95, |
|
"Diagnostics_and_Laboratory_Medicine": 13.33, |
|
"Basic_Medical_Science": 21.15, |
|
"Computer_Science": 20.0, |
|
"Math": 11.67, |
|
"Music": 8.33, |
|
"Materials": 8.33, |
|
"Marketing": 13.56, |
|
"Chemistry": 15.0, |
|
"Geography": 13.46 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 15.43 |
|
}, |
|
"MmvetV2": { |
|
"accuracy": 50.0967, |
|
"capability_scores": { |
|
"math": 32.94117647058824, |
|
"ocr": 57.59615384615381, |
|
"spat": 45.38071065989844, |
|
"rec": 47.3300970873787, |
|
"know": 46.602564102564095, |
|
"gen": 47.85454545454542, |
|
"seq": 43.21428571428573 |
|
}, |
|
"capability_detail_scores": { |
|
"math_ocr": 27.27272727272727, |
|
"spat_math_ocr": 36.666666666666664, |
|
"rec_math_spat_ocr": 45.0, |
|
"rec_spat": 35.714285714285715, |
|
"spat_ocr": 63.46153846153846, |
|
"rec_spat_ocr": 41.66666666666667, |
|
"know_spat_ocr": 87.5, |
|
"rec_ocr": 75.0, |
|
"rec_spat_know": 28.999999999999996, |
|
"ocr": 84.375, |
|
"rec": 60.338983050847474, |
|
"rec_know": 34.61538461538461, |
|
"rec_gen_know": 46.899999999999984, |
|
"know_rec_gen_ocr": 63.84615384615384, |
|
"rec_gen_spat_ocr": 49.53488372093025, |
|
"spat_gen_ocr": 60.0, |
|
"seq_ocr_gen_spat_math": 50.0, |
|
"seq_ocr_rec_spat_math": 0.0, |
|
"rec_gen_spat": 35.45454545454545, |
|
"gen_spat_math_ocr": 30.0, |
|
"rec_seq_spat": 21.428571428571427, |
|
"rec_seq_spat_ocr": 86.66666666666667, |
|
"rec_gen_spat_know": 26.666666666666668, |
|
"rec_gen": 52.64705882352941, |
|
"know_rec_ocr_spat": 45.0, |
|
"rec_gen_ocr_know": 63.84615384615384, |
|
"know_ocr_rec_gen_spat": 75.0, |
|
"rec_math_ocr": 100.0, |
|
"rec_gen_ocr": 42.00000000000001, |
|
"rec_seq_gen_ocr": 57.14285714285715, |
|
"gen_ocr": 50.0, |
|
"rec_seq_gen": 29.999999999999993, |
|
"rec_seq": 35.0, |
|
"rec_seq_gen_spat": 50.0, |
|
"rec_seq_know": 0.0, |
|
"rec_seq_gen_know": 50.0, |
|
"seq_ocr_rec_gen_spat": 60.0, |
|
"seq_know_ocr_rec_gen": 75.0, |
|
"rec_math_know": 0.0, |
|
"rec_seq_ocr": 100.0, |
|
"know_rec_spat_ocr": 45.0, |
|
"know_rec_spat": 28.999999999999996 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 50.0967 |
|
}, |
|
"MathVerse": { |
|
"Text Dominant": { |
|
"accuracy": 25.76, |
|
"correct": 203, |
|
"total": 788 |
|
}, |
|
"Total": { |
|
"accuracy": 20.46, |
|
"correct": 806, |
|
"total": 3940 |
|
}, |
|
"Text Lite": { |
|
"accuracy": 21.7, |
|
"correct": 171, |
|
"total": 788 |
|
}, |
|
"Vision Intensive": { |
|
"accuracy": 18.53, |
|
"correct": 146, |
|
"total": 788 |
|
}, |
|
"Vision Dominant": { |
|
"accuracy": 18.15, |
|
"correct": 143, |
|
"total": 788 |
|
}, |
|
"Vision Only": { |
|
"accuracy": 18.15, |
|
"correct": 143, |
|
"total": 788 |
|
}, |
|
"accuracy": 20.46, |
|
"acc_stderr": 0, |
|
"acc": 20.46 |
|
}, |
|
"Ocrlite": { |
|
"final_score": [ |
|
997, |
|
1644 |
|
], |
|
"accuracy": 60.645, |
|
"Key Information Extraction-Bookshelf": [ |
|
12, |
|
51, |
|
0.235, |
|
{ |
|
"Default": [ |
|
12, |
|
51, |
|
0.235 |
|
] |
|
} |
|
], |
|
"Scene Text-centric VQA-diet_constraints": [ |
|
36, |
|
90, |
|
0.4, |
|
{ |
|
"Default": [ |
|
36, |
|
90, |
|
0.4 |
|
] |
|
} |
|
], |
|
"Doc-oriented VQA-Control": [ |
|
101, |
|
189, |
|
0.534, |
|
{ |
|
"Default": [ |
|
101, |
|
189, |
|
0.534 |
|
] |
|
} |
|
], |
|
"Doc-oriented VQA": [ |
|
115, |
|
204, |
|
0.564, |
|
{ |
|
"Default": [ |
|
115, |
|
204, |
|
0.564 |
|
] |
|
} |
|
], |
|
"Scene Text-centric VQA-Fake_logo": [ |
|
46, |
|
119, |
|
0.387, |
|
{ |
|
"Default": [ |
|
46, |
|
119, |
|
0.387 |
|
] |
|
} |
|
], |
|
"Handwritten Mathematical Expression Recognition": [ |
|
3, |
|
100, |
|
0.03, |
|
{ |
|
"Default": [ |
|
3, |
|
100, |
|
0.03 |
|
] |
|
} |
|
], |
|
"Key Information Extraction": [ |
|
167, |
|
209, |
|
0.799, |
|
{ |
|
"Default": [ |
|
167, |
|
209, |
|
0.799 |
|
] |
|
} |
|
], |
|
"Scene Text-centric VQA-Control": [ |
|
163, |
|
200, |
|
0.815, |
|
{ |
|
"Default": [ |
|
163, |
|
200, |
|
0.815 |
|
] |
|
} |
|
], |
|
"Scene Text-centric VQA": [ |
|
192, |
|
282, |
|
0.681, |
|
{ |
|
"Default": [ |
|
192, |
|
282, |
|
0.681 |
|
] |
|
} |
|
], |
|
"Artistic Text Recognition": [ |
|
34, |
|
50, |
|
0.68, |
|
{ |
|
"Default": [ |
|
34, |
|
50, |
|
0.68 |
|
] |
|
} |
|
], |
|
"Irregular Text Recognition": [ |
|
39, |
|
50, |
|
0.78, |
|
{ |
|
"Default": [ |
|
39, |
|
50, |
|
0.78 |
|
] |
|
} |
|
], |
|
"Non-Semantic Text Recognition": [ |
|
39, |
|
50, |
|
0.78, |
|
{ |
|
"Default": [ |
|
39, |
|
50, |
|
0.78 |
|
] |
|
} |
|
], |
|
"Regular Text Recognition": [ |
|
50, |
|
50, |
|
1.0, |
|
{ |
|
"Default": [ |
|
50, |
|
50, |
|
1.0 |
|
] |
|
} |
|
], |
|
"acc_stderr": 0, |
|
"acc": 60.645 |
|
}, |
|
"OcrliteZh": { |
|
"final_score": [ |
|
108, |
|
234 |
|
], |
|
"accuracy": 46.154, |
|
"Docvqa": [ |
|
3, |
|
10, |
|
0.3, |
|
{ |
|
"Default": [ |
|
3, |
|
10, |
|
0.3 |
|
] |
|
} |
|
], |
|
"Chartqa-human": [ |
|
5, |
|
10, |
|
0.5, |
|
{ |
|
"Default": [ |
|
5, |
|
10, |
|
0.5 |
|
] |
|
} |
|
], |
|
"Chartqa-au": [ |
|
5, |
|
10, |
|
0.5, |
|
{ |
|
"Default": [ |
|
5, |
|
10, |
|
0.5 |
|
] |
|
} |
|
], |
|
"infographic": [ |
|
3, |
|
10, |
|
0.3, |
|
{ |
|
"Default": [ |
|
3, |
|
10, |
|
0.3 |
|
] |
|
} |
|
], |
|
"Key Information Extraction": [ |
|
30, |
|
45, |
|
0.667, |
|
{ |
|
"Default": [ |
|
30, |
|
45, |
|
0.667 |
|
] |
|
} |
|
], |
|
"Scene Text-centric VQA": [ |
|
17, |
|
40, |
|
0.425, |
|
{ |
|
"Default": [ |
|
17, |
|
40, |
|
0.425 |
|
] |
|
} |
|
], |
|
"Artistic Text Recognition": [ |
|
2, |
|
11, |
|
0.182, |
|
{ |
|
"Default": [ |
|
2, |
|
11, |
|
0.182 |
|
] |
|
} |
|
], |
|
"IrRegular Text Recognition": [ |
|
4, |
|
11, |
|
0.364, |
|
{ |
|
"Default": [ |
|
4, |
|
11, |
|
0.364 |
|
] |
|
} |
|
], |
|
"Non-semantic Text Recognition": [ |
|
10, |
|
12, |
|
0.833, |
|
{ |
|
"Default": [ |
|
10, |
|
12, |
|
0.833 |
|
] |
|
} |
|
], |
|
"Regular Text Recognition": [ |
|
10, |
|
11, |
|
0.909, |
|
{ |
|
"Default": [ |
|
10, |
|
11, |
|
0.909 |
|
] |
|
} |
|
], |
|
"Handwriting_CN": [ |
|
12, |
|
20, |
|
0.6, |
|
{ |
|
"Default": [ |
|
12, |
|
20, |
|
0.6 |
|
] |
|
} |
|
], |
|
"Chinese Unlimited": [ |
|
7, |
|
44, |
|
0.159, |
|
{ |
|
"Default": [ |
|
7, |
|
44, |
|
0.159 |
|
] |
|
} |
|
], |
|
"acc_stderr": 0, |
|
"acc": 46.154 |
|
}, |
|
"CharXiv": { |
|
"descriptive": { |
|
"Overall Score": 50.73, |
|
"By Question": { |
|
"Q1": 45.9, |
|
"Q2": 57.83, |
|
"Q3": 42.92, |
|
"Q4": 57.98, |
|
"Q5": 54.81, |
|
"Q6": 38.55, |
|
"Q7": 51.71, |
|
"Q8": 39.29, |
|
"Q9": 27.86, |
|
"Q10": 41.1, |
|
"Q11": 57.71, |
|
"Q12": 44.51, |
|
"Q13": 48.4, |
|
"Q14": 79.79, |
|
"Q15": 85.62, |
|
"Q16": 44.44, |
|
"Q17": 9.38, |
|
"Q18": 48.99, |
|
"Q19": 67.69 |
|
}, |
|
"By Category": { |
|
"Information Extraction": 49.94, |
|
"Enumeration": 59.97, |
|
"Pattern Recognition": 51.97, |
|
"Counting": 47.07, |
|
"Compositionality": 9.38 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 60.95, |
|
"2-4 Subplots": 47.55, |
|
"5+ Subplots": 39.09 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 51.59, |
|
"Economics": 50.54, |
|
"Electrical Engineering and Systems Science": 54.41, |
|
"Mathematics": 52.59, |
|
"Physics": 45.08, |
|
"Quantitative Biology": 47.82, |
|
"Quantitative Finance": 51.94, |
|
"Statistics": 52.21 |
|
}, |
|
"By Year": { |
|
"2020": 48.68, |
|
"2021": 50.48, |
|
"2022": 50.92, |
|
"2023": 52.82 |
|
}, |
|
"N_valid": 4000, |
|
"N_invalid": 0, |
|
"Question Type": "Descriptive" |
|
}, |
|
"reasoning": { |
|
"Overall Score": 21.4, |
|
"By Answer Type": { |
|
"Text-in-Chart": 27.5, |
|
"Text-in-General": 25.25, |
|
"Number-in-Chart": 21.12, |
|
"Number-in-General": 8.3 |
|
}, |
|
"By Source": { |
|
"GPT-Sourced": 20.65, |
|
"GPT-Inspired": 23.61, |
|
"Completely Human": 20.83 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 17.46, |
|
"Economics": 24.64, |
|
"Electrical Engineering and Systems Science": 22.69, |
|
"Mathematics": 20.0, |
|
"Physics": 16.54, |
|
"Quantitative Biology": 19.84, |
|
"Quantitative Finance": 25.0, |
|
"Statistics": 25.66 |
|
}, |
|
"By Year": { |
|
"2020": 23.08, |
|
"2021": 21.46, |
|
"2022": 19.26, |
|
"2023": 21.77 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 19.69, |
|
"2-4 Subplots": 24.6, |
|
"5+ Subplots": 19.07 |
|
}, |
|
"N_valid": 1000, |
|
"N_invalid": 0, |
|
"Question Type": "Reasoning" |
|
}, |
|
"accuracy": 36.06, |
|
"acc_stderr": 0, |
|
"acc": 36.06 |
|
}, |
|
"MathVision": { |
|
"accuracy": 12.93, |
|
"acc_stderr": 0, |
|
"acc": 12.93 |
|
}, |
|
"CII-Bench": { |
|
"accuracy": 41.18, |
|
"domain_score": { |
|
"Life": 31.6, |
|
"Art": 46.32, |
|
"CTC": 37.78, |
|
"Society": 44.32, |
|
"Env.": 59.26, |
|
"Politics": 58.33 |
|
}, |
|
"emotion_score": { |
|
"Neutral": 46.62, |
|
"Negative": 40.0, |
|
"Positive": 36.32 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 41.18 |
|
}, |
|
"Blink": { |
|
"accuracy": 43.35, |
|
"Art Style": 58.12, |
|
"Counting": 55.83, |
|
"Forensic Detection": 10.61, |
|
"Functional Correspondence": 18.46, |
|
"IQ Test": 31.33, |
|
"Jigsaw": 47.33, |
|
"Multi-view Reasoning": 51.13, |
|
"Object Localization": 50.82, |
|
"Relative Depth": 62.1, |
|
"Relative Reflectance": 38.06, |
|
"Semantic Correspondence": 25.9, |
|
"Spatial Relation": 83.22, |
|
"Visual Correspondence": 29.65, |
|
"Visual Similarity": 51.11, |
|
"acc_stderr": 0, |
|
"acc": 43.35 |
|
} |
|
} |
|
} |