vlm_results / Qwen-VL-Max-20250402 / results_2025-01-25T10-42-53.190540.json
{
    "config_general": {
        "model_name": "Qwen-VL-Max-20250402",
        "model_dtype": "float16",
        "model_size": 0
    },
"results": {
"CMMMU": {
"reject_info": {
"reject_rate": 0.22,
"reject_number": 2,
"total_question": 900
},
"艺术与设计": {
"num": 88,
"correct": 62,
"accuracy": 70.45
},
"overall": {
"num": 898,
"correct": 442,
"accuracy": 49.22
},
"商业": {
"num": 126,
"correct": 37,
"accuracy": 29.37
},
"科学": {
"num": 204,
"correct": 95,
"accuracy": 46.57
},
"健康与医学": {
"num": 152,
"correct": 85,
"accuracy": 55.92
},
"人文社会科学": {
"num": 85,
"correct": 55,
"accuracy": 64.71
},
"技术与工程": {
"num": 243,
"correct": 108,
"accuracy": 44.44
},
"accuracy": 49.22,
"acc_stderr": 0,
"acc": 49.22
},
"MMMU": {
"reject_info": {
"reject_rate": 0.11,
"reject_number": 1,
"total_question": 900
},
"accuracy": 55.62,
"subject_score": {
"Accounting": 40.0,
"Agriculture": 60.0,
"Architecture": 43.33,
"Art": 73.33,
"Basic": 62.07,
"Biology": 50.0,
"Chemistry": 33.33,
"Clinical": 63.33,
"Computer": 50.0,
"Design": 80.0,
"Diagnostics": 50.0,
"Economics": 46.67,
"Electronics": 23.33,
"Energy": 53.33,
"Finance": 30.0,
"Geography": 73.33,
"History": 76.67,
"Literature": 83.33,
"Manage": 50.0,
"Marketing": 66.67,
"Materials": 46.67,
"Math": 46.67,
"Mechanical": 33.33,
"Music": 23.33,
"Pharmacy": 66.67,
"Physics": 60.0,
"Psychology": 73.33,
"Public": 63.33,
"Sociology": 73.33
},
"difficulty_score": {
"Medium": 56.26,
"Easy": 66.1,
"Hard": 37.02
},
"acc_stderr": 0,
"acc": 55.62
},
"MMMU_Pro_standard": {
"reject_info": {
"reject_rate": 0.12,
"reject_number": 2,
"total_question": 1730
},
"accuracy": 38.19,
"subject_score": {
"History": 48.21,
"Art": 62.26,
"Design": 48.33,
"Literature": 69.23,
"Agriculture": 26.67,
"Finance": 23.33,
"Sociology": 49.06,
"Accounting": 27.59,
"Energy_and_Power": 25.86,
"Pharmacy": 43.86,
"Architecture_and_Engineering": 33.33,
"Clinical_Medicine": 37.29,
"Public_Health": 27.59,
"Physics": 33.33,
"Art_Theory": 60.0,
"Electronics": 60.0,
"Psychology": 35.0,
"Biology": 32.2,
"Manage": 32.0,
"Economics": 38.98,
"Mechanical_Engineering": 38.98,
"Diagnostics_and_Laboratory_Medicine": 30.0,
"Computer_Science": 48.33,
"Math": 25.0,
"Basic_Medical_Science": 31.37,
"Music": 25.0,
"Materials": 31.67,
"Marketing": 35.59,
"Chemistry": 30.0,
"Geography": 44.23
},
"difficulty_score": {
"Medium": 34.88,
"Easy": 47.63,
"Hard": 32.42
},
"acc_stderr": 0,
"acc": 38.19
},
"MMMU_Pro_vision": {
"accuracy": 35.9,
"subject_score": {
"History": 46.43,
"Art": 43.4,
"Design": 56.67,
"Literature": 65.38,
"Agriculture": 25.0,
"Finance": 23.33,
"Sociology": 42.59,
"Accounting": 20.69,
"Energy_and_Power": 12.07,
"Pharmacy": 47.37,
"Architecture_and_Engineering": 30.0,
"Clinical_Medicine": 38.98,
"Public_Health": 25.86,
"Physics": 28.33,
"Art_Theory": 47.27,
"Electronics": 51.67,
"Psychology": 41.67,
"Biology": 33.9,
"Manage": 36.0,
"Economics": 30.51,
"Mechanical_Engineering": 30.51,
"Diagnostics_and_Laboratory_Medicine": 26.67,
"Basic_Medical_Science": 32.69,
"Computer_Science": 50.0,
"Math": 28.33,
"Music": 25.0,
"Materials": 25.0,
"Marketing": 35.59,
"Chemistry": 41.67,
"Geography": 40.38
},
"acc_stderr": 0,
"acc": 35.9
},
"MmvetV2": {
"reject_info": {
"reject_rate": 1.16,
"reject_number": 6,
"total_question": 517
},
"accuracy": 70.9785,
"capability_scores": {
"ocr": 80.96153846153851,
"math": 86.76470588235294,
"spat": 69.74358974358972,
"rec": 67.16748768472915,
"know": 64.63576158940397,
"gen": 70.00000000000003,
"seq": 69.46428571428571
},
"capability_detail_scores": {
"ocr_math": 90.9090909090909,
"spat_ocr_math": 92.66666666666666,
"rec_spat_ocr_math": 75.0,
"rec_spat": 60.0,
"spat_ocr": 75.0,
"rec_spat_ocr": 75.0,
"spat_ocr_know": 100.0,
"rec_ocr": 75.0,
"rec_spat_know": 53.75,
"ocr": 88.4375,
"rec": 68.44827586206897,
"rec_know": 61.53846153846154,
"rec_know_gen": 64.02061855670101,
"rec_ocr_know_gen": 80.76923076923077,
"rec_spat_ocr_gen": 79.53488372093027,
"spat_ocr_gen": 95.0,
"gen_spat_seq_math_ocr": 100.0,
"spat_seq_rec_math_ocr": 50.0,
"rec_spat_gen": 53.63636363636364,
"spat_ocr_math_gen": 60.0,
"rec_spat_seq": 47.142857142857146,
"rec_spat_seq_ocr": 66.66666666666666,
"rec_spat_know_gen": 30.0,
"rec_gen": 71.47058823529413,
"rec_spat_ocr_know": 25.0,
"gen_spat_rec_ocr_know": 75.0,
"rec_ocr_math": 100.0,
"rec_ocr_gen": 84.00000000000001,
"rec_seq_ocr_gen": 75.71428571428571,
"ocr_gen": 83.07692307692308,
"rec_seq_gen": 69.28571428571429,
"rec_seq": 58.333333333333336,
"rec_spat_seq_gen": 81.25,
"rec_seq_know": 100.0,
"rec_seq_know_gen": 70.0,
"gen_spat_seq_rec_ocr": 60.0,
"gen_seq_rec_ocr_know": 95.0,
"rec_know_math": 50.0,
"rec_seq_ocr": 100.0
},
"acc_stderr": 0,
"acc": 70.9785
},
"MathVerse": {
"Text Dominant": {
"accuracy": 62.44,
"correct": 492,
"total": 788
},
"Total": {
"accuracy": 52.03,
"correct": 2050,
"total": 3940
},
"Text Lite": {
"accuracy": 53.05,
"correct": 418,
"total": 788
},
"Vision Intensive": {
"accuracy": 49.75,
"correct": 392,
"total": 788
},
"Vision Dominant": {
"accuracy": 48.48,
"correct": 382,
"total": 788
},
"Vision Only": {
"accuracy": 46.45,
"correct": 366,
"total": 788
},
"accuracy": 52.03,
"acc_stderr": 0,
"acc": 52.03
},
"Ocrlite": {
"reject_info": {
"reject_rate": 0.12,
"reject_number": 2,
"total_question": 1644
},
"final_score": [
1261,
1642
],
"accuracy": 76.797,
"Key Information Extraction-Bookshelf": [
28,
51,
0.549,
{
"Default": [
28,
51,
0.549
]
}
],
"Scene Text-centric VQA-diet_constraints": [
68,
90,
0.756,
{
"Default": [
68,
90,
0.756
]
}
],
"Doc-oriented VQA-Control": [
157,
188,
0.835,
{
"Default": [
157,
188,
0.835
]
}
],
"Doc-oriented VQA": [
175,
204,
0.858,
{
"Default": [
175,
204,
0.858
]
}
],
"Scene Text-centric VQA-Fake_logo": [
50,
119,
0.42,
{
"Default": [
50,
119,
0.42
]
}
],
"Handwritten Mathematical Expression Recognition": [
1,
100,
0.01,
{
"Default": [
1,
100,
0.01
]
}
],
"Key Information Extraction": [
195,
209,
0.933,
{
"Default": [
195,
209,
0.933
]
}
],
"Scene Text-centric VQA-Control": [
171,
199,
0.859,
{
"Default": [
171,
199,
0.859
]
}
],
"Scene Text-centric VQA": [
246,
282,
0.872,
{
"Default": [
246,
282,
0.872
]
}
],
"Artistic Text Recognition": [
42,
50,
0.84,
{
"Default": [
42,
50,
0.84
]
}
],
"Irregular Text Recognition": [
43,
50,
0.86,
{
"Default": [
43,
50,
0.86
]
}
],
"Non-Semantic Text Recognition": [
36,
50,
0.72,
{
"Default": [
36,
50,
0.72
]
}
],
"Regular Text Recognition": [
49,
50,
0.98,
{
"Default": [
49,
50,
0.98
]
}
],
"acc_stderr": 0,
"acc": 76.797
},
"OcrliteZh": {
"reject_info": {
"reject_rate": 0.43,
"reject_number": 1,
"total_question": 234
},
"final_score": [
172,
233
],
"accuracy": 73.82,
"Docvqa": [
9,
10,
0.9,
{
"Default": [
9,
10,
0.9
]
}
],
"Chartqa-human": [
7,
10,
0.7,
{
"Default": [
7,
10,
0.7
]
}
],
"Chartqa-au": [
8,
10,
0.8,
{
"Default": [
8,
10,
0.8
]
}
],
"infographic": [
8,
9,
0.889,
{
"Default": [
8,
9,
0.889
]
}
],
"Key Information Extraction": [
42,
45,
0.933,
{
"Default": [
42,
45,
0.933
]
}
],
"Scene Text-centric VQA": [
29,
40,
0.725,
{
"Default": [
29,
40,
0.725
]
}
],
"Artistic Text Recognition": [
5,
11,
0.455,
{
"Default": [
5,
11,
0.455
]
}
],
"IrRegular Text Recognition": [
8,
11,
0.727,
{
"Default": [
8,
11,
0.727
]
}
],
"Non-semantic Text Recognition": [
10,
12,
0.833,
{
"Default": [
10,
12,
0.833
]
}
],
"Regular Text Recognition": [
10,
11,
0.909,
{
"Default": [
10,
11,
0.909
]
}
],
"Handwriting_CN": [
13,
20,
0.65,
{
"Default": [
13,
20,
0.65
]
}
],
"Chinese Unlimited": [
23,
44,
0.523,
{
"Default": [
23,
44,
0.523
]
}
],
"acc_stderr": 0,
"acc": 73.82
},
"CharXiv": {
"descriptive": {
"Overall Score": 83.53,
"By Question": {
"Q1": 89.75,
"Q2": 86.52,
"Q3": 73.39,
"Q4": 87.55,
"Q5": 84.94,
"Q6": 85.54,
"Q7": 84.62,
"Q8": 89.29,
"Q9": 75.62,
"Q10": 81.51,
"Q11": 65.14,
"Q12": 82.42,
"Q13": 73.52,
"Q14": 95.04,
"Q15": 97.44,
"Q16": 72.22,
"Q17": 59.38,
"Q18": 91.09,
"Q19": 92.31
},
"By Category": {
"Information Extraction": 84.7,
"Enumeration": 87.65,
"Pattern Recognition": 79.69,
"Counting": 83.72,
"Compositionality": 59.38
},
"By Subplot": {
"1 Subplot": 87.89,
"2-4 Subplots": 83.73,
"5+ Subplots": 76.06
},
"By Subject": {
"Computer Science": 84.52,
"Economics": 85.33,
"Electrical Engineering and Systems Science": 88.87,
"Mathematics": 83.52,
"Physics": 77.17,
"Quantitative Biology": 80.16,
"Quantitative Finance": 84.48,
"Statistics": 84.51
},
"By Year": {
"2020": 82.69,
"2021": 82.85,
"2022": 85.04,
"2023": 83.57
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 45.0,
"By Answer Type": {
"Text-in-Chart": 49.09,
"Text-in-General": 42.42,
"Number-in-Chart": 44.83,
"Number-in-General": 38.43
},
"By Source": {
"GPT-Sourced": 50.0,
"GPT-Inspired": 45.83,
"Completely Human": 43.17
},
"By Subject": {
"Computer Science": 49.21,
"Economics": 43.48,
"Electrical Engineering and Systems Science": 47.9,
"Mathematics": 45.93,
"Physics": 50.39,
"Quantitative Biology": 41.27,
"Quantitative Finance": 38.79,
"Statistics": 42.48
},
"By Year": {
"2020": 44.13,
"2021": 44.06,
"2022": 43.85,
"2023": 47.98
},
"By Subplot": {
"1 Subplot": 49.48,
"2-4 Subplots": 42.06,
"5+ Subplots": 42.37
},
"N_valid": 1000,
"N_invalid": 1,
"Question Type": "Reasoning"
},
"accuracy": 64.27,
"acc_stderr": 0,
"acc": 64.27
},
"MathVision": {
"reject_info": {
"reject_rate": 0.07,
"reject_number": 2,
"total_question": 3040
},
"accuracy": 33.15,
"acc_stderr": 0,
"acc": 33.15
},
"CII-Bench": {
"reject_info": {
"reject_rate": 0.13,
"reject_number": 1,
"total_question": 765
},
"accuracy": 55.1,
"domain_score": {
"Life": 50.43,
"Art": 56.62,
"CTC": 50.37,
"Society": 59.46,
"Env.": 66.67,
"Politics": 58.33
},
"emotion_score": {
"Neutral": 55.09,
"Negative": 56.98,
"Positive": 52.99
},
"acc_stderr": 0,
"acc": 55.1
},
"Blink": {
"reject_info": {
"reject_rate": 0.05,
"reject_number": 1,
"total_question": 1901
},
"accuracy": 57.42,
"Art Style": 76.92,
"Counting": 61.67,
"Forensic Detection": 46.21,
"Functional Correspondence": 39.23,
"IQ Test": 28.67,
"Jigsaw": 65.77,
"Multi-view Reasoning": 43.61,
"Object Localization": 53.28,
"Relative Depth": 73.39,
"Relative Reflectance": 31.34,
"Semantic Correspondence": 46.76,
"Spatial Relation": 74.13,
"Visual Correspondence": 77.33,
"Visual Similarity": 84.44,
"acc_stderr": 0,
"acc": 57.42
}
}
}
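
For readers who want to work with this file programmatically, here is a minimal sketch (not part of the dataset) of loading it and printing a per-benchmark summary. It assumes only the structure visible above: every block under "results" carries a top-level "acc" field, and "reject_info", where present, counts questions the model refused or answered unparseably, with "reject_rate" given as a percentage of "total_question". The path is this upload's filename; adjust it for your local copy.

```python
import json

# Hypothetical local path; matches this file's upload name.
PATH = "results_2025-01-25T10-42-53.190540.json"

with open(PATH, encoding="utf-8") as f:
    report = json.load(f)

print(f"model: {report['config_general']['model_name']}")

# Every benchmark block has a top-level "acc"; "reject_info" is optional
# (e.g. MMMU_Pro_vision above has none).
for bench, scores in report["results"].items():
    line = f"{bench}: acc={scores['acc']}"
    reject = scores.get("reject_info")
    if reject:
        line += (
            f" (rejected {reject['reject_number']}"
            f"/{reject['total_question']}, {reject['reject_rate']}%)"
        )
    print(line)
```

Note that rejected questions are excluded from the denominator where the file records counts: in CMMMU, "overall" scores 442 correct of 898 questions (900 total minus 2 rejects), which is exactly the reported 49.22.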