vlm_results/InternVL2-2B/results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "InternVL2-2B",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 35,
"accuracy": 39.77
},
"overall": {
"num": 900,
"correct": 263,
"accuracy": 29.22
},
"商业": {
"num": 126,
"correct": 22,
"accuracy": 17.46
},
"科学": {
"num": 204,
"correct": 43,
"accuracy": 21.08
},
"健康与医学": {
"num": 153,
"correct": 52,
"accuracy": 33.99
},
"人文社会科学": {
"num": 85,
"correct": 37,
"accuracy": 43.53
},
"技术与工程": {
"num": 244,
"correct": 74,
"accuracy": 30.33
},
"accuracy": 29.22,
"acc_stderr": 0,
"acc": 29.22
},
"MMMU": {
"accuracy": 32.89,
"acc_stderr": 0,
"acc": 32.89
},
"MMMU_Pro_standard": {
"accuracy": 20.4,
"acc_stderr": 0,
"acc": 20.4
},
"MMMU_Pro_vision": {
"accuracy": 12.2,
"subject_score": {
"Finance": 6.67,
"Agriculture": 11.67,
"Design": 16.67,
"Art": 9.43,
"Literature": 19.23,
"Sociology": 14.81,
"History": 8.93,
"Pharmacy": 22.81,
"Clinical_Medicine": 8.47,
"Energy_and_Power": 5.17,
"Accounting": 12.07,
"Public_Health": 5.17,
"Architecture_and_Engineering": 11.67,
"Psychology": 10.0,
"Art_Theory": 7.27,
"Electronics": 8.33,
"Physics": 11.67,
"Biology": 16.95,
"Mechanical_Engineering": 18.64,
"Manage": 20.0,
"Economics": 8.47,
"Basic_Medical_Science": 23.08,
"Diagnostics_and_Laboratory_Medicine": 13.33,
"Computer_Science": 11.67,
"Math": 8.33,
"Music": 11.67,
"Marketing": 6.78,
"Materials": 16.67,
"Chemistry": 10.0,
"Geography": 13.46
},
"acc_stderr": 0,
"acc": 12.2
},
"MmvetV2": {
"accuracy": 39.4584,
"capability_scores": {
"ocr": 40.14423076923076,
"math": 21.764705882352942,
"spat": 34.82233502538071,
"rec": 37.93689320388349,
"know": 34.615384615384606,
"gen": 37.92727272727271,
"seq": 40.178571428571445
},
"capability_detail_scores": {
"ocr_math": 22.727272727272727,
"ocr_math_spat": 16.666666666666664,
"math_ocr_rec_spat": 0.0,
"rec_spat": 46.07142857142858,
"ocr_spat": 48.07692307692308,
"ocr_rec_spat": 8.333333333333332,
"ocr_know_spat": 42.5,
"ocr_rec": 62.5,
"know_rec_spat": 30.0,
"ocr": 65.0,
"rec": 47.96610169491527,
"know_rec": 14.615384615384613,
"know_gen_rec": 35.99999999999999,
"ocr_know_gen_rec": 45.38461538461539,
"ocr_gen_rec_spat": 33.25581395348837,
"ocr_gen_spat": 70.0,
"gen_seq_spat_math_ocr": 0.0,
"seq_spat_math_rec_ocr": 0.0,
"gen_rec_spat": 20.45454545454546,
"ocr_gen_math_spat": 40.0,
"seq_rec_spat": 48.57142857142858,
"ocr_seq_rec_spat": 50.0,
"know_gen_rec_spat": 33.33333333333333,
"gen_rec": 47.94117647058824,
"ocr_know_rec_spat": 12.5,
"gen_spat_know_rec_ocr": 75.0,
"math_ocr_rec": 100.0,
"ocr_gen_rec": 44.00000000000001,
"ocr_gen_rec_seq": 45.714285714285715,
"ocr_gen": 45.38461538461538,
"seq_gen_rec": 26.42857142857143,
"seq_rec": 44.99999999999999,
"seq_gen_rec_spat": 61.24999999999999,
"know_seq_rec": 0.0,
"gen_seq_rec": 26.42857142857143,
"seq_know_gen_rec": 45.0,
"gen_seq_spat_rec_ocr": 53.333333333333336,
"gen_seq_know_rec_ocr": 30.0,
"math_know_rec": 50.0,
"ocr_seq_rec": 0.0
},
"acc_stderr": 0,
"acc": 39.4584
},
"MathVerse": {
"Text Lite": {
"accuracy": 23.73,
"correct": 187,
"total": 788
},
"Total": {
"accuracy": 21.62,
"correct": 852,
"total": 3940
},
"Vision Intensive": {
"accuracy": 24.11,
"correct": 190,
"total": 788
},
"Text Dominant": {
"accuracy": 24.75,
"correct": 195,
"total": 788
},
"Vision Dominant": {
"accuracy": 20.43,
"correct": 161,
"total": 788
},
"Vision Only": {
"accuracy": 15.1,
"correct": 119,
"total": 788
},
"accuracy": 21.62,
"acc_stderr": 0,
"acc": 21.62
},
"Ocrlite": {
"final_score": [
944,
1645
],
"accuracy": 57.386,
"Key Information Extraction-Bookshelf": [
8,
52
],
"Scene Text-centric VQA-diet_constraints": [
20,
90
],
"Doc-oriented VQA-Control": [
98,
189
],
"Doc-oriented VQA": [
88,
204
],
"Scene Text-centric VQA-Fake_logo": [
53,
119
],
"Handwritten Mathematical Expression Recognition": [
40,
100
],
"Key Information Extraction": [
152,
209
],
"Scene Text-centric VQA-Control": [
155,
200
],
"Scene Text-centric VQA": [
170,
282
],
"Artistic Text Recognition": [
38,
50
],
"Irregular Text Recognition": [
36,
50
],
"Non-Semantic Text Recognition": [
40,
50
],
"Regular Text Recognition": [
46,
50
],
"acc_stderr": 0,
"acc": 57.386
},
"OcrliteZh": {
"final_score": [
99,
234
],
"accuracy": 42.308,
"Docvqa": [
2,
10
],
"Chartqa-human": [
3,
10
],
"Chartqa-au": [
3,
10
],
"infographic": [
3,
10
],
"Key Information Extraction": [
24,
45
],
"Scene Text-centric VQA": [
16,
40
],
"Artistic Text Recognition": [
2,
11
],
"IrRegular Text Recognition": [
5,
11
],
"Non-semantic Text Recognition": [
10,
12
],
"Regular Text Recognition": [
8,
11
],
"Handwriting_CN": [
11,
20
],
"Chinese Unlimited": [
12,
44
],
"acc_stderr": 0,
"acc": 42.308
},
"CharXiv": {
"descriptive": {
"Overall Score": 36.6,
"By Question": {
"Q1": 52.05,
"Q2": 62.61,
"Q3": 38.2,
"Q4": 54.47,
"Q5": 43.51,
"Q6": 28.92,
"Q7": 32.91,
"Q8": 31.7,
"Q9": 26.87,
"Q10": 40.41,
"Q11": 21.14,
"Q12": 47.8,
"Q13": 37.9,
"Q14": 42.91,
"Q15": 14.38,
"Q16": 36.11,
"Q17": 4.46,
"Q18": 42.51,
"Q19": 40.0
},
"By Category": {
"Information Extraction": 44.66,
"Enumeration": 30.19,
"Pattern Recognition": 33.84,
"Counting": 43.77,
"Compositionality": 4.46
},
"By Subplot": {
"1 Subplot": 48.83,
"2-4 Subplots": 31.81,
"5+ Subplots": 24.26
},
"By Subject": {
"Computer Science": 38.89,
"Economics": 37.68,
"Electrical Engineering and Systems Science": 41.18,
"Mathematics": 37.78,
"Physics": 33.27,
"Quantitative Biology": 34.33,
"Quantitative Finance": 33.41,
"Statistics": 36.06
},
"By Year": {
"2020": 37.96,
"2021": 34.77,
"2022": 37.6,
"2023": 36.19
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 20.1,
"By Answer Type": {
"Text-in-Chart": 22.95,
"Text-in-General": 24.24,
"Number-in-Chart": 22.84,
"Number-in-General": 10.04
},
"By Source": {
"GPT-Sourced": 25.54,
"GPT-Inspired": 18.52,
"Completely Human": 19.0
},
"By Subject": {
"Computer Science": 21.43,
"Economics": 18.12,
"Electrical Engineering and Systems Science": 21.01,
"Mathematics": 22.22,
"Physics": 18.9,
"Quantitative Biology": 22.22,
"Quantitative Finance": 18.97,
"Statistics": 17.7
},
"By Year": {
"2020": 19.03,
"2021": 22.61,
"2022": 16.8,
"2023": 21.77
},
"By Subplot": {
"1 Subplot": 20.98,
"2-4 Subplots": 21.96,
"5+ Subplots": 15.68
},
"N_valid": 1000,
"N_invalid": 0,
"Question Type": "Reasoning"
},
"accuracy": 28.35,
"acc_stderr": 0,
"acc": 28.35
},
"MathVision": {
"accuracy": 13.88,
"acc_stderr": 0,
"acc": 13.88
},
"CII-Bench": {
"accuracy": 38.95,
"domain_score": {
"Life": 29.0,
"CTC": 40.74,
"Art": 42.65,
"Society": 41.62,
"Env.": 55.56,
"Politics": 45.83
},
"emotion_score": {
"Neutral": 42.11,
"Positive": 39.32,
"Negative": 35.47
},
"acc_stderr": 0,
"acc": 38.95
},
"Blink": {
"accuracy": 40.35,
"Art Style": 52.99,
"Counting": 46.67,
"Forensic Detection": 18.94,
"Functional Correspondence": 18.46,
"IQ Test": 28.67,
"Jigsaw": 42.0,
"Multi-view Reasoning": 46.62,
"Object Localization": 45.08,
"Relative Depth": 51.61,
"Relative Reflectance": 30.6,
"Semantic Correspondence": 28.06,
"Spatial Relation": 72.73,
"Visual Correspondence": 33.14,
"Visual Similarity": 53.33,
"acc_stderr": 0,
"acc": 40.35
}
}
}
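Since most per-category entries store raw counts alongside the pre-computed percentage (CMMMU uses {"num", "correct", "accuracy"} objects, the OCR suites use [correct, total] pairs), the file can be sanity-checked by recomputing each accuracy from its counts. Below is a minimal Python sketch, assuming the JSON above is saved under its upload name in the working directory; all key names are taken verbatim from the file:

```python
import json

# Load the results file (upload name from this repo; adjust the path as needed).
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# CMMMU: each per-subject dict carries raw counts, so the stored "accuracy"
# should match 100 * correct / num rounded to two decimals.
for subject, entry in results["CMMMU"].items():
    if isinstance(entry, dict):  # skip the flat "accuracy"/"acc_stderr"/"acc" fields
        recomputed = round(100 * entry["correct"] / entry["num"], 2)
        assert abs(recomputed - entry["accuracy"]) < 0.01, subject

# Ocrlite: "final_score" is a [correct, total] pair; the headline accuracy
# is 100 * 944 / 1645 = 57.386 (three decimals in this file).
correct, total = results["Ocrlite"]["final_score"]
print(f"Ocrlite: {100 * correct / total:.3f}")  # -> 57.386
```

The same pattern applies to OcrliteZh (99/234 = 42.308) and to MathVerse, whose per-split objects use {"correct", "total"} in place of {"correct", "num"}.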