{
  "config_general": {
    "model_name": "GLM-4V-Plus",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 43.56,
      "acc_stderr": 0,
      "acc": 43.56
    },
    "MMMU": {
      "accuracy": 54.44,
      "acc_stderr": 0,
      "acc": 54.44
    },
    "MMMU_Pro_standard": {
      "reject_info": {
        "reject_rate": 0.06,
        "reject_number": 1,
        "total_question": 1730
      },
      "accuracy": 37.19,
      "acc_stderr": 0,
      "acc": 37.19
    },
    "MMMU_Pro_vision": {
      "accuracy": 23.35,
      "subject_score": {
        "History": 35.71,
        "Art": 47.17,
        "Design": 45.0,
        "Literature": 59.62,
        "Agriculture": 18.33,
        "Finance": 10.0,
        "Sociology": 42.59,
        "Accounting": 15.52,
        "Energy_and_Power": 12.07,
        "Pharmacy": 36.84,
        "Architecture_and_Engineering": 15.0,
        "Clinical_Medicine": 13.56,
        "Public_Health": 13.79,
        "Physics": 11.67,
        "Art_Theory": 47.27,
        "Electronics": 8.33,
        "Psychology": 33.33,
        "Biology": 23.73,
        "Manage": 18.0,
        "Economics": 6.78,
        "Mechanical_Engineering": 10.17,
        "Diagnostics_and_Laboratory_Medicine": 25.0,
        "Basic_Medical_Science": 32.69,
        "Computer_Science": 20.0,
        "Math": 20.0,
        "Music": 21.67,
        "Materials": 8.33,
        "Marketing": 16.95,
        "Chemistry": 25.0,
        "Geography": 17.31
      },
      "acc_stderr": 0,
      "acc": 23.35
    },
    "MmvetV2": {
      "reject_info": {
        "reject_rate": 2.9,
        "reject_number": 15,
        "total_question": 517
      },
      "accuracy": 56.7131,
      "capability_scores": {
        "ocr": 62.079207920792065,
        "math": 62.05882352941177,
        "spat": 51.18556701030926,
        "rec": 53.77833753148617,
        "know": 54.285714285714285,
        "gen": 53.901515151515135,
        "seq": 36.09756097560975
      },
      "capability_detail_scores": {
        "ocr_math": 67.27272727272727,
        "ocr_spat_math": 68.66666666666667,
        "ocr_rec_spat_math": 45.0,
        "rec_spat": 48.214285714285715,
        "ocr_spat": 59.61538461538461,
        "ocr_rec_spat": 45.83333333333333,
        "ocr_know_spat": 65.0,
        "ocr_rec": 100.0,
        "rec_know_spat": 57.99999999999999,
        "ocr": 80.9375,
        "rec": 64.57627118644068,
        "rec_know": 46.92307692307692,
        "rec_know_gen": 53.400000000000006,
        "ocr_rec_know_gen": 74.61538461538463,
        "ocr_rec_spat_gen": 59.76744186046512,
        "ocr_spat_gen": 60.0,
        "ocr_seq_spat_math_gen": 0.0,
        "ocr_seq_spat_rec_math": 0.0,
        "rec_spat_gen": 38.18181818181819,
        "ocr_spat_math_gen": 50.0,
        "rec_spat_seq": 28.333333333333332,
        "ocr_rec_spat_seq": 36.66666666666667,
        "rec_know_spat_gen": 50.0,
        "rec_gen": 59.11764705882353,
        "ocr_rec_spat_know": 25.0,
        "ocr_spat_know": 65.0,
        "ocr_spat_rec_know_gen": 40.0,
        "ocr_rec_math": 0.0,
        "ocr_rec_gen": 44.00000000000001,
        "ocr_rec_gen_seq": 65.0,
        "ocr_gen": 60.0,
        "rec_gen_seq": 51.818181818181806,
        "rec_seq": 0.0,
        "rec_spat_gen_seq": 50.0,
        "rec_know_seq": 0,
        "rec_know_gen_seq": 0.0,
        "ocr_seq_spat_rec_gen": 10.0,
        "ocr_seq_rec_know_gen": 70.0,
        "rec_know_math": 100.0,
        "ocr_rec_seq": 100.0,
        "rec_spat_know": 57.99999999999999
      },
      "acc_stderr": 0,
      "acc": 56.7131
    },
    "MathVerse": {
      "Text Dominant": {
        "accuracy": 33.63,
        "correct": 265,
        "total": 788
      },
      "Total": {
        "accuracy": 29.44,
        "correct": 1160,
        "total": 3940
      },
      "Text Lite": {
        "accuracy": 30.33,
        "correct": 239,
        "total": 788
      },
      "Vision Intensive": {
        "accuracy": 29.19,
        "correct": 230,
        "total": 788
      },
      "Vision Dominant": {
        "accuracy": 29.31,
        "correct": 231,
        "total": 788
      },
      "Vision Only": {
        "accuracy": 24.75,
        "correct": 195,
        "total": 788
      },
      "accuracy": 29.44,
      "acc_stderr": 0,
      "acc": 29.44
    },
    "Ocrlite": {
      "final_score": [1188, 1645],
      "accuracy": 72.219,
      "Key Information Extraction-Bookshelf": [31, 52],
      "Scene Text-centric VQA-diet_constraints": [69, 90],
      "Doc-oriented VQA-Control": [138, 189],
      "Doc-oriented VQA": [144, 204],
      "Scene Text-centric VQA-Fake_logo": [42, 119],
      "Handwritten Mathematical Expression Recognition": [49, 100],
      "Key Information Extraction": [167, 209],
      "Scene Text-centric VQA-Control": [155, 200],
      "Scene Text-centric VQA": [228, 282],
      "Artistic Text Recognition": [39, 50],
      "Irregular Text Recognition": [40, 50],
      "Non-Semantic Text Recognition": [38, 50],
      "Regular Text Recognition": [48, 50],
      "acc_stderr": 0,
      "acc": 72.219
    },
    "OcrliteZh": {
      "final_score": [150, 234],
      "accuracy": 64.103,
      "Docvqa": [7, 10],
      "Chartqa-human": [3, 10],
      "Chartqa-au": [8, 10],
      "infographic": [7, 10],
      "Key Information Extraction": [36, 45],
      "Scene Text-centric VQA": [29, 40],
      "Artistic Text Recognition": [5, 11],
      "IrRegular Text Recognition": [7, 11],
      "Non-semantic Text Recognition": [8, 12],
      "Regular Text Recognition": [10, 11],
      "Handwriting_CN": [14, 20],
      "Chinese Unlimited": [16, 44],
      "acc_stderr": 0,
      "acc": 64.103
    },
    "CharXiv": {
      "reject_info": {
        "reject_rate": 0.06,
        "reject_number": 3,
        "total_question": 5000
      },
      "descriptive": {
        "Overall Score": 58.06,
        "By Question": {
          "Q1": 67.49,
          "Q2": 81.3,
          "Q3": 67.38,
          "Q4": 86.38,
          "Q5": 79.92,
          "Q6": 69.88,
          "Q7": 61.97,
          "Q8": 67.86,
          "Q9": 61.19,
          "Q10": 57.53,
          "Q11": 32.57,
          "Q12": 60.44,
          "Q13": 69.86,
          "Q14": 15.25,
          "Q15": 37.38,
          "Q16": 66.67,
          "Q17": 8.04,
          "Q18": 60.73,
          "Q19": 78.46
        },
        "By Category": {
          "Information Extraction": 73.59,
          "Enumeration": 47.46,
          "Pattern Recognition": 50.44,
          "Counting": 62.34,
          "Compositionality": 8.04
        },
        "By Subplot": {
          "1 Subplot": 69.43,
          "2-4 Subplots": 55.33,
          "5+ Subplots": 43.86
        },
        "By Subject": {
          "Computer Science": 56.55,
          "Economics": 59.24,
          "Electrical Engineering and Systems Science": 68.63,
          "Mathematics": 60.19,
          "Physics": 54.13,
          "Quantitative Biology": 49.6,
          "Quantitative Finance": 60.13,
          "Statistics": 56.42
        },
        "By Year": {
          "2020": 58.2,
          "2021": 57.91,
          "2022": 57.48,
          "2023": 58.67
        },
        "N_valid": 3999,
        "N_invalid": 0,
        "Question Type": "Descriptive"
      },
      "reasoning": {
        "Overall Score": 31.86,
        "By Answer Type": {
          "Text-in-Chart": 33.49,
          "Text-in-General": 37.76,
          "Number-in-Chart": 33.19,
          "Number-in-General": 24.89
        },
        "By Source": {
          "GPT-Sourced": 33.7,
          "GPT-Inspired": 25.93,
          "Completely Human": 33.44
        },
        "By Subject": {
          "Computer Science": 35.71,
          "Economics": 32.61,
          "Electrical Engineering and Systems Science": 32.2,
          "Mathematics": 30.37,
          "Physics": 37.3,
          "Quantitative Biology": 26.98,
          "Quantitative Finance": 30.17,
          "Statistics": 29.2
        },
        "By Year": {
          "2020": 32.79,
          "2021": 34.36,
          "2022": 29.1,
          "2023": 31.05
        },
        "By Subplot": {
          "1 Subplot": 32.21,
          "2-4 Subplots": 35.01,
          "5+ Subplots": 26.27
        },
        "N_valid": 998,
        "N_invalid": 1,
        "Question Type": "Reasoning"
      },
      "accuracy": 44.96,
      "acc_stderr": 0,
      "acc": 44.96
    },
    "MathVision": {
      "accuracy": 17.34,
      "acc_stderr": 0,
      "acc": 17.34
    },
    "CII-Bench": {
      "accuracy": 60.86,
      "domain_score": {
        "Life": 55.41,
        "Art": 59.56,
        "CTC": 62.96,
        "Society": 64.67,
        "Env.": 66.67,
        "Politics": 66.67
      },
      "emotion_score": {
        "Neutral": 60.38,
        "Negative": 60.75,
        "Positive": 61.54
      },
      "acc_stderr": 0,
      "acc": 60.86
    },
    "Blink": {
      "accuracy": 55.44,
      "Art Style": 51.28,
      "Counting": 75.0,
      "Forensic Detection": 13.64,
      "Functional Correspondence": 35.38,
      "IQ Test": 24.67,
      "Jigsaw": 69.33,
      "Multi-view Reasoning": 52.63,
      "Object Localization": 63.93,
      "Relative Depth": 69.35,
      "Relative Reflectance": 28.36,
      "Semantic Correspondence": 48.92,
      "Spatial Relation": 86.71,
      "Visual Correspondence": 72.09,
      "Visual Similarity": 82.22,
      "acc_stderr": 0,
      "acc": 55.44
    }
  }
}