{
  "config_general": {
    "model_name": "LLaVA-OneVision-0.5B",
    "model_dtype": "float16",
    "model_size": 0
  },
"results": { |
|
"CMMMU": { |
|
"艺术与设计": { |
|
"num": 88, |
|
"correct": 36, |
|
"accuracy": 40.91 |
|
}, |
|
"overall": { |
|
"num": 900, |
|
"correct": 243, |
|
"accuracy": 27.0 |
|
}, |
|
"商业": { |
|
"num": 126, |
|
"correct": 16, |
|
"accuracy": 12.7 |
|
}, |
|
"科学": { |
|
"num": 204, |
|
"correct": 34, |
|
"accuracy": 16.67 |
|
}, |
|
"健康与医学": { |
|
"num": 153, |
|
"correct": 55, |
|
"accuracy": 35.95 |
|
}, |
|
"人文社会科学": { |
|
"num": 85, |
|
"correct": 32, |
|
"accuracy": 37.65 |
|
}, |
|
"技术与工程": { |
|
"num": 244, |
|
"correct": 70, |
|
"accuracy": 28.69 |
|
}, |
|
"accuracy": 27.0, |
|
"acc_stderr": 0, |
|
"acc": 27.0 |
|
}, |
|
"MMMU": { |
|
"accuracy": 33.11, |
|
"subject_score": { |
|
"Accounting": 40.0, |
|
"Agriculture": 26.67, |
|
"Architecture": 20.0, |
|
"Art": 45.0, |
|
"Basic": 36.67, |
|
"Biology": 26.67, |
|
"Chemistry": 23.33, |
|
"Clinical": 23.33, |
|
"Computer": 26.67, |
|
"Design": 53.33, |
|
"Diagnostics": 23.33, |
|
"Economics": 33.33, |
|
"Electronics": 23.33, |
|
"Energy": 30.0, |
|
"Finance": 20.0, |
|
"Geography": 23.33, |
|
"History": 40.0, |
|
"Literature": 66.67, |
|
"Manage": 26.67, |
|
"Marketing": 40.0, |
|
"Materials": 43.33, |
|
"Math": 33.33, |
|
"Mechanical": 40.0, |
|
"Music": 26.67, |
|
"Pharmacy": 30.0, |
|
"Physics": 26.67, |
|
"Psychology": 20.0, |
|
"Public": 36.67, |
|
"Sociology": 43.33 |
|
}, |
|
"difficulty_score": { |
|
"Medium": 35.38, |
|
"Easy": 35.59, |
|
"Hard": 23.76 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 33.11 |
|
}, |
|
"MMMU_Pro_standard": { |
|
"accuracy": 17.28, |
|
"subject_score": { |
|
"Literature": 48.08, |
|
"Agriculture": 10.0, |
|
"History": 19.64, |
|
"Sociology": 18.52, |
|
"Design": 20.0, |
|
"Finance": 6.67, |
|
"Art": 20.75, |
|
"Public_Health": 17.24, |
|
"Accounting": 15.52, |
|
"Energy_and_Power": 18.97, |
|
"Clinical_Medicine": 10.17, |
|
"Architecture_and_Engineering": 15.0, |
|
"Pharmacy": 17.54, |
|
"Physics": 11.67, |
|
"Electronics": 13.33, |
|
"Psychology": 15.0, |
|
"Art_Theory": 16.36, |
|
"Economics": 6.78, |
|
"Manage": 22.0, |
|
"Diagnostics_and_Laboratory_Medicine": 11.67, |
|
"Mechanical_Engineering": 20.34, |
|
"Basic_Medical_Science": 19.23, |
|
"Computer_Science": 26.67, |
|
"Math": 10.0, |
|
"Biology": 18.64, |
|
"Marketing": 16.95, |
|
"Music": 20.0, |
|
"Materials": 8.33, |
|
"Chemistry": 20.0, |
|
"Geography": 30.77 |
|
}, |
|
"difficulty_score": { |
|
"Easy": 18.75, |
|
"Medium": 18.48, |
|
"Hard": 12.97 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 17.28 |
|
}, |
|
"MMMU_Pro_vision": { |
|
"accuracy": 12.08, |
|
"subject_score": { |
|
"Art": 13.21, |
|
"History": 5.36, |
|
"Agriculture": 8.33, |
|
"Finance": 3.33, |
|
"Literature": 11.54, |
|
"Sociology": 12.96, |
|
"Design": 5.0, |
|
"Public_Health": 6.9, |
|
"Clinical_Medicine": 8.47, |
|
"Accounting": 6.9, |
|
"Architecture_and_Engineering": 10.0, |
|
"Pharmacy": 29.82, |
|
"Energy_and_Power": 6.9, |
|
"Psychology": 10.0, |
|
"Physics": 5.0, |
|
"Electronics": 13.33, |
|
"Art_Theory": 9.09, |
|
"Manage": 20.0, |
|
"Mechanical_Engineering": 16.95, |
|
"Economics": 3.39, |
|
"Biology": 11.86, |
|
"Diagnostics_and_Laboratory_Medicine": 11.67, |
|
"Basic_Medical_Science": 15.38, |
|
"Computer_Science": 16.67, |
|
"Math": 13.33, |
|
"Music": 31.67, |
|
"Materials": 13.33, |
|
"Marketing": 8.47, |
|
"Chemistry": 23.33, |
|
"Geography": 11.54 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 12.08 |
|
}, |
|
"MmvetV2": { |
|
"reject_info": { |
|
"reject_rate": 0.19, |
|
"reject_number": 1, |
|
"total_question": 517 |
|
}, |
|
"accuracy": 36.7636, |
|
"capability_scores": { |
|
"math": 20.0, |
|
"ocr": 34.66346153846154, |
|
"spat": 31.573604060913706, |
|
"rec": 37.42092457420924, |
|
"know": 34.743589743589745, |
|
"gen": 35.69343065693429, |
|
"seq": 31.636363636363633 |
|
}, |
|
"capability_detail_scores": { |
|
"math_ocr": 13.636363636363635, |
|
"math_ocr_spat": 16.0, |
|
"math_ocr_spat_rec": 20.0, |
|
"spat_rec": 46.78571428571429, |
|
"ocr_spat": 26.923076923076923, |
|
"ocr_spat_rec": 38.33333333333333, |
|
"ocr_know_spat": 50.0, |
|
"ocr_rec": 37.5, |
|
"know_spat_rec": 20.0, |
|
"ocr": 51.87499999999999, |
|
"rec": 50.000000000000014, |
|
"know_rec": 25.384615384615383, |
|
"know_rec_gen": 36.0, |
|
"ocr_know_rec_gen": 44.61538461538462, |
|
"ocr_gen_spat_rec": 28.604651162790702, |
|
"ocr_gen_spat": 45.0, |
|
"ocr_math_spat_gen_seq": 0.0, |
|
"ocr_math_spat_seq_rec": 0.0, |
|
"gen_spat_rec": 31.818181818181817, |
|
"math_ocr_gen_spat": 50.0, |
|
"seq_spat_rec": 18.571428571428573, |
|
"ocr_seq_spat_rec": 30.0, |
|
"know_spat_rec_gen": 16.666666666666664, |
|
"gen_rec": 42.352941176470594, |
|
"ocr_know_spat_rec": 25.0, |
|
"ocr_know_spat_gen_rec": 64.99999999999999, |
|
"gen_rec_know": 36.0, |
|
"math_ocr_rec": 100.0, |
|
"ocr_gen_rec": 34.0, |
|
"ocr_gen_seq_rec": 40.0, |
|
"ocr_gen": 38.46153846153847, |
|
"gen_seq_rec": 25.38461538461539, |
|
"seq_rec": 30.0, |
|
"gen_seq_rec_spat": 47.5, |
|
"know_seq_rec": 0.0, |
|
"know_seq_rec_gen": 30.0, |
|
"gen_seq_spat_rec": 47.5, |
|
"ocr_spat_gen_seq_rec": 40.0, |
|
"ocr_know_gen_seq_rec": 35.0, |
|
"math_know_rec": 50.0, |
|
"ocr_seq_rec": 100.0 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 36.7636 |
|
}, |
|
"MathVerse": { |
|
"Text Lite": { |
|
"accuracy": 18.15, |
|
"correct": 143, |
|
"total": 788 |
|
}, |
|
"Total": { |
|
"accuracy": 19.11, |
|
"correct": 753, |
|
"total": 3940 |
|
}, |
|
"Vision Intensive": { |
|
"accuracy": 19.04, |
|
"correct": 150, |
|
"total": 788 |
|
}, |
|
"Text Dominant": { |
|
"accuracy": 21.95, |
|
"correct": 173, |
|
"total": 788 |
|
}, |
|
"Vision Dominant": { |
|
"accuracy": 19.16, |
|
"correct": 151, |
|
"total": 788 |
|
}, |
|
"Vision Only": { |
|
"accuracy": 17.26, |
|
"correct": 136, |
|
"total": 788 |
|
}, |
|
"accuracy": 19.11, |
|
"acc_stderr": 0, |
|
"acc": 19.11 |
|
}, |
|
"Ocrlite": { |
|
"final_score": [ |
|
819, |
|
1645 |
|
], |
|
"accuracy": 49.787, |
|
"Key Information Extraction-Bookshelf": [ |
|
2, |
|
52 |
|
], |
|
"Scene Text-centric VQA-diet_constraints": [ |
|
20, |
|
90 |
|
], |
|
"Doc-oriented VQA-Control": [ |
|
64, |
|
189 |
|
], |
|
"Doc-oriented VQA": [ |
|
81, |
|
204 |
|
], |
|
"Scene Text-centric VQA-Fake_logo": [ |
|
34, |
|
119 |
|
], |
|
"Handwritten Mathematical Expression Recognition": [ |
|
27, |
|
100 |
|
], |
|
"Key Information Extraction": [ |
|
134, |
|
209 |
|
], |
|
"Scene Text-centric VQA-Control": [ |
|
151, |
|
200 |
|
], |
|
"Scene Text-centric VQA": [ |
|
171, |
|
282 |
|
], |
|
"Artistic Text Recognition": [ |
|
34, |
|
50 |
|
], |
|
"Irregular Text Recognition": [ |
|
30, |
|
50 |
|
], |
|
"Non-Semantic Text Recognition": [ |
|
27, |
|
50 |
|
], |
|
"Regular Text Recognition": [ |
|
44, |
|
50 |
|
], |
|
"acc_stderr": 0, |
|
"acc": 49.787 |
|
}, |
|
"OcrliteZh": { |
|
"final_score": [ |
|
37, |
|
234 |
|
], |
|
"accuracy": 15.812, |
|
"Docvqa": [ |
|
2, |
|
10 |
|
], |
|
"Chartqa-human": [ |
|
2, |
|
10 |
|
], |
|
"Chartqa-au": [ |
|
1, |
|
10 |
|
], |
|
"infographic": [ |
|
1, |
|
10 |
|
], |
|
"Key Information Extraction": [ |
|
14, |
|
45 |
|
], |
|
"Scene Text-centric VQA": [ |
|
9, |
|
40 |
|
], |
|
"Artistic Text Recognition": [ |
|
0, |
|
11 |
|
], |
|
"IrRegular Text Recognition": [ |
|
0, |
|
11 |
|
], |
|
"Non-semantic Text Recognition": [ |
|
0, |
|
12 |
|
], |
|
"Regular Text Recognition": [ |
|
0, |
|
11 |
|
], |
|
"Handwriting_CN": [ |
|
0, |
|
20 |
|
], |
|
"Chinese Unlimited": [ |
|
8, |
|
44 |
|
], |
|
"acc_stderr": 0, |
|
"acc": 15.812 |
|
}, |
|
"CharXiv": { |
|
"descriptive": { |
|
"Overall Score": 30.27, |
|
"By Question": { |
|
"Q1": 25.41, |
|
"Q2": 55.65, |
|
"Q3": 28.76, |
|
"Q4": 53.7, |
|
"Q5": 48.12, |
|
"Q6": 26.91, |
|
"Q7": 31.2, |
|
"Q8": 13.84, |
|
"Q9": 15.92, |
|
"Q10": 30.82, |
|
"Q11": 19.43, |
|
"Q12": 26.92, |
|
"Q13": 30.59, |
|
"Q14": 72.7, |
|
"Q15": 12.78, |
|
"Q16": 33.33, |
|
"Q17": 5.36, |
|
"Q18": 3.24, |
|
"Q19": 40.0 |
|
}, |
|
"By Category": { |
|
"Information Extraction": 38.55, |
|
"Enumeration": 30.27, |
|
"Pattern Recognition": 11.79, |
|
"Counting": 30.53, |
|
"Compositionality": 5.36 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 32.71, |
|
"2-4 Subplots": 31.35, |
|
"5+ Subplots": 24.58 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 29.56, |
|
"Economics": 31.16, |
|
"Electrical Engineering and Systems Science": 30.04, |
|
"Mathematics": 32.04, |
|
"Physics": 27.56, |
|
"Quantitative Biology": 29.37, |
|
"Quantitative Finance": 28.66, |
|
"Statistics": 33.85 |
|
}, |
|
"By Year": { |
|
"2020": 29.45, |
|
"2021": 28.64, |
|
"2022": 33.5, |
|
"2023": 29.64 |
|
}, |
|
"N_valid": 4000, |
|
"N_invalid": 104, |
|
"Question Type": "Descriptive" |
|
}, |
|
"reasoning": { |
|
"Overall Score": 15.4, |
|
"By Answer Type": { |
|
"Text-in-Chart": 15.45, |
|
"Text-in-General": 22.22, |
|
"Number-in-Chart": 17.67, |
|
"Number-in-General": 10.04 |
|
}, |
|
"By Source": { |
|
"GPT-Sourced": 23.37, |
|
"GPT-Inspired": 15.28, |
|
"Completely Human": 13.0 |
|
}, |
|
"By Subject": { |
|
"Computer Science": 17.46, |
|
"Economics": 18.84, |
|
"Electrical Engineering and Systems Science": 15.97, |
|
"Mathematics": 16.3, |
|
"Physics": 17.32, |
|
"Quantitative Biology": 12.7, |
|
"Quantitative Finance": 12.93, |
|
"Statistics": 10.62 |
|
}, |
|
"By Year": { |
|
"2020": 17.81, |
|
"2021": 18.77, |
|
"2022": 11.48, |
|
"2023": 13.31 |
|
}, |
|
"By Subplot": { |
|
"1 Subplot": 17.62, |
|
"2-4 Subplots": 13.76, |
|
"5+ Subplots": 14.41 |
|
}, |
|
"N_valid": 1000, |
|
"N_invalid": 0, |
|
"Question Type": "Reasoning" |
|
}, |
|
"accuracy": 22.84, |
|
"acc_stderr": 0, |
|
"acc": 22.84 |
|
}, |
|
"MathVision": { |
|
"accuracy": 13.09, |
|
"acc_stderr": 0, |
|
"acc": 13.09 |
|
}, |
|
"CII-Bench": { |
|
"accuracy": 28.5, |
|
"domain_score": { |
|
"CTC": 33.33, |
|
"Art": 32.35, |
|
"Env.": 35.19, |
|
"Life": 22.08, |
|
"Society": 27.57, |
|
"Politics": 33.33 |
|
}, |
|
"emotion_score": { |
|
"Positive": 30.34, |
|
"Negative": 28.68, |
|
"Neutral": 26.69 |
|
}, |
|
"acc_stderr": 0, |
|
"acc": 28.5 |
|
}, |
|
"Blink": { |
|
"accuracy": 38.77, |
|
"Art Style": 50.43, |
|
"Counting": 33.33, |
|
"Forensic Detection": 25.0, |
|
"Functional Correspondence": 24.62, |
|
"IQ Test": 22.0, |
|
"Jigsaw": 51.33, |
|
"Multi-view Reasoning": 44.36, |
|
"Object Localization": 59.84, |
|
"Relative Depth": 43.55, |
|
"Relative Reflectance": 29.85, |
|
"Semantic Correspondence": 34.53, |
|
"Spatial Relation": 54.55, |
|
"Visual Correspondence": 26.74, |
|
"Visual Similarity": 48.15, |
|
"acc_stderr": 0, |
|
"acc": 38.77 |
|
} |
|
} |
|
} |