vlm_results/Qwen2.5-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json
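
Evaluation results for Qwen2.5-VL-72B-Instruct (float16) across twelve multimodal benchmarks: CMMMU, MMMU, MMMU_Pro_standard, MMMU_Pro_vision, MmvetV2, MathVerse, Ocrlite, OcrliteZh, CharXiv, MathVision, CII-Bench, and Blink. Each benchmark block reports a headline score ("accuracy"/"acc") alongside its per-category breakdowns.
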
{
"config_general": {
"model_name": "Qwen2.5-VL-72B-Instruct",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 66,
"accuracy": 75.0
},
"overall": {
"num": 900,
"correct": 509,
"accuracy": 56.56
},
"商业": {
"num": 126,
"correct": 46,
"accuracy": 36.51
},
"科学": {
"num": 204,
"correct": 112,
"accuracy": 54.9
},
"健康与医学": {
"num": 153,
"correct": 95,
"accuracy": 62.09
},
"人文社会科学": {
"num": 85,
"correct": 60,
"accuracy": 70.59
},
"技术与工程": {
"num": 244,
"correct": 130,
"accuracy": 53.28
},
"accuracy": 56.56,
"acc_stderr": 0,
"acc": 56.56
},
"MMMU": {
"accuracy": 62.56,
"subject_score": {
"Accounting": 66.67,
"Agriculture": 53.33,
"Architecture": 33.33,
"Art": 81.67,
"Basic": 66.67,
"Biology": 60.0,
"Chemistry": 46.67,
"Clinical": 70.0,
"Computer": 63.33,
"Design": 83.33,
"Diagnostics": 43.33,
"Economics": 76.67,
"Electronics": 40.0,
"Energy": 50.0,
"Finance": 46.67,
"Geography": 73.33,
"History": 76.67,
"Literature": 90.0,
"Manage": 60.0,
"Marketing": 66.67,
"Materials": 40.0,
"Math": 56.67,
"Mechanical": 46.67,
"Music": 33.33,
"Pharmacy": 70.0,
"Physics": 66.67,
"Psychology": 80.0,
"Public": 83.33,
"Sociology": 70.0
},
"difficulty_score": {
"Medium": 61.79,
"Hard": 44.75,
"Easy": 74.58
},
"acc_stderr": 0,
"acc": 62.56
},
"MMMU_Pro_standard": {
"accuracy": 46.94,
"subject_score": {
"History": 60.71,
"Design": 66.67,
"Finance": 43.33,
"Literature": 73.08,
"Sociology": 51.85,
"Art": 73.58,
"Agriculture": 33.33,
"Energy_and_Power": 25.86,
"Accounting": 31.03,
"Architecture_and_Engineering": 33.33,
"Pharmacy": 57.89,
"Public_Health": 43.1,
"Clinical_Medicine": 42.37,
"Art_Theory": 74.55,
"Physics": 40.0,
"Electronics": 65.0,
"Psychology": 45.0,
"Economics": 54.24,
"Biology": 45.76,
"Mechanical_Engineering": 44.07,
"Manage": 40.0,
"Diagnostics_and_Laboratory_Medicine": 33.33,
"Basic_Medical_Science": 50.0,
"Computer_Science": 50.0,
"Math": 41.67,
"Materials": 28.33,
"Music": 30.0,
"Marketing": 45.76,
"Chemistry": 45.0,
"Geography": 48.08
},
"difficulty_score": {
"Medium": 42.2,
"Hard": 37.66,
"Easy": 61.17
},
"acc_stderr": 0,
"acc": 46.94
},
"MMMU_Pro_vision": {
"accuracy": 46.3,
"subject_score": {
"Design": 61.67,
"History": 58.93,
"Art": 52.83,
"Literature": 69.23,
"Sociology": 55.56,
"Pharmacy": 50.88,
"Agriculture": 21.67,
"Clinical_Medicine": 40.68,
"Accounting": 60.34,
"Energy_and_Power": 39.66,
"Architecture_and_Engineering": 35.0,
"Public_Health": 62.07,
"Physics": 45.0,
"Art_Theory": 58.18,
"Finance": 61.67,
"Manage": 40.0,
"Psychology": 43.33,
"Biology": 32.2,
"Diagnostics_and_Laboratory_Medicine": 25.0,
"Economics": 61.02,
"Electronics": 56.67,
"Basic_Medical_Science": 42.31,
"Mechanical_Engineering": 38.98,
"Computer_Science": 45.0,
"Math": 43.33,
"Music": 30.0,
"Materials": 28.33,
"Marketing": 50.85,
"Chemistry": 46.67,
"Geography": 36.54
},
"acc_stderr": 0,
"acc": 46.3
},
"MmvetV2": {
"reject_info": {
"reject_rate": 1.35,
"reject_number": 7,
"total_question": 517
},
"accuracy": 69.1765,
"capability_scores": {
"math": 76.47058823529412,
"ocr": 72.58536585365856,
"spat": 65.20408163265303,
"rec": 67.0617283950618,
"know": 65.32051282051277,
"gen": 69.1143911439115,
"seq": 72.24489795918367
},
"capability_detail_scores": {
"math_ocr": 72.72727272727273,
"spat_ocr_math": 93.33333333333333,
"rec_spat_ocr_math": 50.0,
"rec_spat": 61.78571428571429,
"spat_ocr": 64.61538461538461,
"rec_spat_ocr": 50.0,
"know_spat_ocr": 100.0,
"rec_ocr": 95.0,
"rec_know_spat": 48.0,
"ocr": 84.37500000000001,
"rec": 67.28813559322035,
"rec_know": 78.46153846153847,
"rec_know_gen": 68.19999999999997,
"rec_know_ocr_gen": 69.23076923076923,
"rec_spat_ocr_gen": 74.41860465116281,
"spat_ocr_gen": 80.0,
"spat_math_ocr_seq_gen": 100.0,
"spat_math_ocr_seq_rec": 100.0,
"rec_spat_gen": 62.27272727272729,
"spat_ocr_gen_math": 0.0,
"rec_spat_seq": 65.0,
"rec_spat_ocr_seq": 33.33333333333333,
"rec_know_spat_gen": 36.66666666666667,
"rec_gen": 70.58823529411767,
"rec_know_spat_ocr": 0.0,
"know_spat_ocr_gen_rec": 50.0,
"rec_math_ocr": 100.0,
"rec_ocr_gen": 88.00000000000001,
"rec_seq_ocr_gen": 75.0,
"ocr_gen": 67.6923076923077,
"rec_seq_gen": 71.53846153846153,
"rec_seq": 100.0,
"rec_spat_seq_gen": 75.0,
"rec_know_seq": 100.0,
"rec_gen_seq": 71.53846153846153,
"rec_know_seq_gen": 40.0,
"spat_ocr_seq_rec_gen": 53.333333333333336,
"know_ocr_seq_rec_gen": 90.0,
"rec_know_math": 0.0,
"rec_ocr_seq": 100.0
},
"acc_stderr": 0,
"acc": 69.1765
},
"MathVerse": {
"Vision Intensive": {
"accuracy": 35.28,
"correct": 278,
"total": 788
},
"Total": {
"accuracy": 37.31,
"correct": 1470,
"total": 3940
},
"Text Lite": {
"accuracy": 37.94,
"correct": 299,
"total": 788
},
"Text Dominant": {
"accuracy": 43.78,
"correct": 345,
"total": 788
},
"Vision Dominant": {
"accuracy": 36.68,
"correct": 289,
"total": 788
},
"Vision Only": {
"accuracy": 32.87,
"correct": 259,
"total": 788
},
"accuracy": 37.31,
"acc_stderr": 0,
"acc": 37.31
},
"Ocrlite": {
"final_score": [
1306,
1644
],
"accuracy": 79.44,
"Key Information Extraction-Bookshelf": [
40,
51,
0.784,
{
"Default": [
40,
51,
0.784
]
}
],
"Scene Text-centric VQA-diet_constraints": [
71,
90,
0.789,
{
"Default": [
71,
90,
0.789
]
}
],
"Doc-oriented VQA-Control": [
147,
189,
0.778,
{
"Default": [
147,
189,
0.778
]
}
],
"Doc-oriented VQA": [
170,
204,
0.833,
{
"Default": [
170,
204,
0.833
]
}
],
"Scene Text-centric VQA-Fake_logo": [
74,
119,
0.622,
{
"Default": [
74,
119,
0.622
]
}
],
"Handwritten Mathematical Expression Recognition": [
1,
100,
0.01,
{
"Default": [
1,
100,
0.01
]
}
],
"Key Information Extraction": [
200,
209,
0.957,
{
"Default": [
200,
209,
0.957
]
}
],
"Scene Text-centric VQA-Control": [
172,
200,
0.86,
{
"Default": [
172,
200,
0.86
]
}
],
"Scene Text-centric VQA": [
248,
282,
0.879,
{
"Default": [
248,
282,
0.879
]
}
],
"Artistic Text Recognition": [
44,
50,
0.88,
{
"Default": [
44,
50,
0.88
]
}
],
"Irregular Text Recognition": [
47,
50,
0.94,
{
"Default": [
47,
50,
0.94
]
}
],
"Non-Semantic Text Recognition": [
43,
50,
0.86,
{
"Default": [
43,
50,
0.86
]
}
],
"Regular Text Recognition": [
49,
50,
0.98,
{
"Default": [
49,
50,
0.98
]
}
],
"acc_stderr": 0,
"acc": 79.44
},
"OcrliteZh": {
"final_score": [
175,
234
],
"accuracy": 74.786,
"Docvqa": [
7,
10,
0.7,
{
"Default": [
7,
10,
0.7
]
}
],
"Chartqa-human": [
5,
10,
0.5,
{
"Default": [
5,
10,
0.5
]
}
],
"Chartqa-au": [
8,
10,
0.8,
{
"Default": [
8,
10,
0.8
]
}
],
"infographic": [
8,
10,
0.8,
{
"Default": [
8,
10,
0.8
]
}
],
"Key Information Extraction": [
40,
45,
0.889,
{
"Default": [
40,
45,
0.889
]
}
],
"Scene Text-centric VQA": [
30,
40,
0.75,
{
"Default": [
30,
40,
0.75
]
}
],
"Artistic Text Recognition": [
7,
11,
0.636,
{
"Default": [
7,
11,
0.636
]
}
],
"IrRegular Text Recognition": [
10,
11,
0.909,
{
"Default": [
10,
11,
0.909
]
}
],
"Non-semantic Text Recognition": [
11,
12,
0.917,
{
"Default": [
11,
12,
0.917
]
}
],
"Regular Text Recognition": [
11,
11,
1.0,
{
"Default": [
11,
11,
1.0
]
}
],
"Handwriting_CN": [
15,
20,
0.75,
{
"Default": [
15,
20,
0.75
]
}
],
"Chinese Unlimited": [
23,
44,
0.523,
{
"Default": [
23,
44,
0.523
]
}
],
"acc_stderr": 0,
"acc": 74.786
},
"CharXiv": {
"descriptive": {
"Overall Score": 85.28,
"By Question": {
"Q1": 84.43,
"Q2": 80.87,
"Q3": 72.96,
"Q4": 87.94,
"Q5": 89.12,
"Q6": 84.34,
"Q7": 88.46,
"Q8": 90.18,
"Q9": 85.07,
"Q10": 82.88,
"Q11": 77.14,
"Q12": 89.01,
"Q13": 71.23,
"Q14": 95.74,
"Q15": 95.85,
"Q16": 77.78,
"Q17": 70.54,
"Q18": 93.12,
"Q19": 92.31
},
"By Category": {
"Information Extraction": 84.1,
"Enumeration": 88.7,
"Pattern Recognition": 85.81,
"Counting": 87.28,
"Compositionality": 70.54
},
"By Subplot": {
"1 Subplot": 89.18,
"2-4 Subplots": 86.44,
"5+ Subplots": 77.01
},
"By Subject": {
"Computer Science": 84.13,
"Economics": 86.41,
"Electrical Engineering and Systems Science": 89.08,
"Mathematics": 86.48,
"Physics": 82.48,
"Quantitative Biology": 80.56,
"Quantitative Finance": 85.56,
"Statistics": 87.83
},
"By Year": {
"2020": 85.12,
"2021": 83.72,
"2022": 86.07,
"2023": 86.29
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 45.6,
"By Answer Type": {
"Text-in-Chart": 48.41,
"Text-in-General": 53.54,
"Number-in-Chart": 45.69,
"Number-in-General": 36.68
},
"By Source": {
"GPT-Sourced": 50.54,
"GPT-Inspired": 43.98,
"Completely Human": 44.67
},
"By Subject": {
"Computer Science": 44.44,
"Economics": 50.0,
"Electrical Engineering and Systems Science": 40.34,
"Mathematics": 51.85,
"Physics": 54.33,
"Quantitative Biology": 39.68,
"Quantitative Finance": 37.93,
"Statistics": 44.25
},
"By Year": {
"2020": 44.94,
"2021": 44.83,
"2022": 39.75,
"2023": 52.82
},
"By Subplot": {
"1 Subplot": 47.93,
"2-4 Subplots": 44.18,
"5+ Subplots": 44.07
},
"N_valid": 1000,
"N_invalid": 0,
"Question Type": "Reasoning"
},
"accuracy": 65.44,
"acc_stderr": 0,
"acc": 65.44
},
"MathVision": {
"accuracy": 30.26,
"acc_stderr": 0,
"acc": 30.26
},
"CII-Bench": {
"accuracy": 63.27,
"domain_score": {
"CTC": 62.22,
"Society": 69.19,
"Env.": 66.67,
"Art": 63.24,
"Life": 58.87,
"Politics": 58.33
},
"emotion_score": {
"Positive": 61.54,
"Negative": 66.04,
"Neutral": 62.03
},
"acc_stderr": 0,
"acc": 63.27
},
"Blink": {
"accuracy": 59.18,
"Art Style": 71.79,
"Counting": 70.83,
"Forensic Detection": 54.55,
"Functional Correspondence": 42.31,
"IQ Test": 29.33,
"Jigsaw": 72.0,
"Multi-view Reasoning": 43.61,
"Object Localization": 42.62,
"Relative Depth": 75.81,
"Relative Reflectance": 33.58,
"Semantic Correspondence": 51.8,
"Spatial Relation": 82.52,
"Visual Correspondence": 75.58,
"Visual Similarity": 80.0,
"acc_stderr": 0,
"acc": 59.18
}
}
}
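
Every block under "results" in this dump exposes the same headline fields ("accuracy", "acc_stderr", "acc"), so a per-benchmark summary can be extracted generically. Below is a minimal Python sketch, assuming the file sits in the working directory under its upload name; the script is illustrative and not part of the upload.

import json

# Load the results dump (path assumed; adjust to your local copy).
path = "results_2025-01-25T10-42-53.190540.json"
with open(path, encoding="utf-8") as f:
    data = json.load(f)

cfg = data["config_general"]
print(f"{cfg['model_name']} ({cfg['model_dtype']})")

# Each benchmark block carries a top-level "acc" field next to its
# per-category breakdowns; print the headline score for each.
for benchmark, block in data["results"].items():
    print(f"{benchmark:<20} acc={block['acc']}")

Run against the file above, this would print one line per benchmark, from "CMMMU ... acc=56.56" down through "Blink ... acc=59.18".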