vlm_results/Qwen2.5-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Qwen2.5-VL-7B-Instruct",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 56,
"accuracy": 63.64
},
"overall": {
"num": 900,
"correct": 406,
"accuracy": 45.11
},
"商业": {
"num": 126,
"correct": 35,
"accuracy": 27.78
},
"科学": {
"num": 204,
"correct": 83,
"accuracy": 40.69
},
"健康与医学": {
"num": 153,
"correct": 72,
"accuracy": 47.06
},
"人文社会科学": {
"num": 85,
"correct": 49,
"accuracy": 57.65
},
"技术与工程": {
"num": 244,
"correct": 111,
"accuracy": 45.49
},
"accuracy": 45.11,
"acc_stderr": 0,
"acc": 45.11
},
"MMMU": {
"accuracy": 50.67,
"subject_score": {
"Accounting": 33.33,
"Agriculture": 40.0,
"Architecture": 50.0,
"Art": 78.33,
"Basic": 53.33,
"Biology": 50.0,
"Chemistry": 36.67,
"Clinical": 56.67,
"Computer": 50.0,
"Design": 73.33,
"Diagnostics": 40.0,
"Economics": 43.33,
"Electronics": 33.33,
"Energy": 30.0,
"Finance": 33.33,
"Geography": 46.67,
"History": 66.67,
"Literature": 80.0,
"Manage": 33.33,
"Marketing": 56.67,
"Materials": 40.0,
"Math": 43.33,
"Mechanical": 46.67,
"Music": 30.0,
"Pharmacy": 70.0,
"Physics": 33.33,
"Psychology": 73.33,
"Public": 60.0,
"Sociology": 60.0
},
"difficulty_score": {
"Medium": 46.46,
"Easy": 64.07,
"Hard": 38.67
},
"acc_stderr": 0,
"acc": 50.67
},
"MMMU_Pro_standard": {
"accuracy": 36.76,
"subject_score": {
"History": 48.21,
"Finance": 21.67,
"Design": 63.33,
"Literature": 65.38,
"Agriculture": 25.0,
"Clinical_Medicine": 28.81,
"Accounting": 25.86,
"Sociology": 38.89,
"Art": 54.72,
"Physics": 25.0,
"Public_Health": 20.69,
"Energy_and_Power": 20.69,
"Pharmacy": 49.12,
"Electronics": 53.33,
"Architecture_and_Engineering": 31.67,
"Art_Theory": 67.27,
"Psychology": 38.33,
"Economics": 33.9,
"Biology": 33.9,
"Diagnostics_and_Laboratory_Medicine": 28.33,
"Manage": 26.0,
"Mechanical_Engineering": 35.59,
"Basic_Medical_Science": 36.54,
"Computer_Science": 45.0,
"Math": 25.0,
"Music": 26.67,
"Materials": 23.33,
"Marketing": 30.51,
"Chemistry": 36.67,
"Geography": 51.92
},
"difficulty_score": {
"Medium": 34.46,
"Easy": 46.59,
"Hard": 28.43
},
"acc_stderr": 0,
"acc": 36.76
},
"MMMU_Pro_vision": {
"accuracy": 34.91,
"subject_score": {
"Design": 46.67,
"History": 53.57,
"Sociology": 33.33,
"Art": 50.94,
"Literature": 69.23,
"Agriculture": 23.33,
"Pharmacy": 40.35,
"Clinical_Medicine": 30.51,
"Architecture_and_Engineering": 20.0,
"Accounting": 43.1,
"Physics": 33.33,
"Art_Theory": 50.91,
"Energy_and_Power": 18.97,
"Psychology": 26.67,
"Biology": 30.51,
"Manage": 22.0,
"Economics": 37.29,
"Public_Health": 46.55,
"Mechanical_Engineering": 23.73,
"Diagnostics_and_Laboratory_Medicine": 26.67,
"Electronics": 41.67,
"Basic_Medical_Science": 36.54,
"Finance": 45.0,
"Computer_Science": 38.33,
"Math": 23.33,
"Music": 28.33,
"Marketing": 32.2,
"Materials": 15.0,
"Chemistry": 36.67,
"Geography": 28.85
},
"acc_stderr": 0,
"acc": 34.91
},
"MmvetV2": {
"reject_info": {
"reject_rate": 1.35,
"reject_number": 7,
"total_question": 517
},
"accuracy": 61.7843,
"capability_scores": {
"ocr": 68.5365853658537,
"math": 69.11764705882352,
"spat": 57.70408163265303,
"rec": 57.30864197530875,
"know": 57.56410256410256,
"gen": 59.22509225092255,
"seq": 44.08163265306123
},
"capability_detail_scores": {
"ocr_math": 86.36363636363636,
"spat_ocr_math": 70.0,
"spat_rec_ocr_math": 45.0,
"spat_rec": 56.07142857142857,
"spat_ocr": 77.3076923076923,
"spat_rec_ocr": 44.166666666666664,
"spat_know_ocr": 100.0,
"rec_ocr": 75.0,
"spat_know_rec": 40.0,
"ocr": 83.125,
"rec": 67.28813559322033,
"know_rec": 57.692307692307686,
"know_rec_gen": 57.999999999999964,
"know_rec_ocr_gen": 67.6923076923077,
"spat_rec_ocr_gen": 61.86046511627909,
"spat_ocr_gen": 80.0,
"seq_gen_math_spat_ocr": 20.0,
"seq_math_spat_ocr_rec": 0.0,
"spat_rec_gen": 51.81818181818182,
"spat_gen_ocr_math": 40.0,
"spat_rec_seq": 38.33333333333333,
"spat_rec_ocr_seq": 0.0,
"spat_know_rec_gen": 40.00000000000001,
"rec_gen": 60.882352941176464,
"spat_know_rec_ocr": 12.5,
"know_gen_spat_ocr_rec": 90.0,
"rec_ocr_math": 100.0,
"rec_ocr_gen": 72.0,
"rec_ocr_seq_gen": 45.0,
"ocr_gen": 77.6923076923077,
"rec_seq_gen": 36.15384615384615,
"rec_seq": 50.0,
"spat_rec_seq_gen": 63.74999999999999,
"know_rec_seq": 0.0,
"know_rec_seq_gen": 65.0,
"seq_gen_spat_ocr_rec": 50.0,
"know_seq_gen_ocr_rec": 85.00000000000001,
"know_rec_math": 50.0,
"rec_ocr_seq": 100.0
},
"acc_stderr": 0,
"acc": 61.7843
},
"MathVerse": {
"Vision Intensive": {
"accuracy": 42.64,
"correct": 336,
"total": 788
},
"Total": {
"accuracy": 45.38,
"correct": 1788,
"total": 3940
},
"Text Dominant": {
"accuracy": 53.81,
"correct": 424,
"total": 788
},
"Text Lite": {
"accuracy": 47.34,
"correct": 373,
"total": 788
},
"Vision Dominant": {
"accuracy": 45.43,
"correct": 358,
"total": 788
},
"Vision Only": {
"accuracy": 37.69,
"correct": 297,
"total": 788
},
"accuracy": 45.38,
"acc_stderr": 0,
"acc": 45.38
},
"Ocrlite": {
"final_score": [
1247,
1644
],
"accuracy": 75.852,
"Key Information Extraction-Bookshelf": [
26,
51,
0.51,
{
"Default": [
26,
51,
0.51
]
}
],
"Scene Text-centric VQA-diet_constraints": [
52,
90,
0.578,
{
"Default": [
52,
90,
0.578
]
}
],
"Doc-oriented VQA-Control": [
142,
189,
0.751,
{
"Default": [
142,
189,
0.751
]
}
],
"Doc-oriented VQA": [
171,
204,
0.838,
{
"Default": [
171,
204,
0.838
]
}
],
"Scene Text-centric VQA-Fake_logo": [
54,
119,
0.454,
{
"Default": [
54,
119,
0.454
]
}
],
"Handwritten Mathematical Expression Recognition": [
1,
100,
0.01,
{
"Default": [
1,
100,
0.01
]
}
],
"Key Information Extraction": [
198,
209,
0.947,
{
"Default": [
198,
209,
0.947
]
}
],
"Scene Text-centric VQA-Control": [
173,
200,
0.865,
{
"Default": [
173,
200,
0.865
]
}
],
"Scene Text-centric VQA": [
247,
282,
0.876,
{
"Default": [
247,
282,
0.876
]
}
],
"Artistic Text Recognition": [
42,
50,
0.84,
{
"Default": [
42,
50,
0.84
]
}
],
"Irregular Text Recognition": [
47,
50,
0.94,
{
"Default": [
47,
50,
0.94
]
}
],
"Non-Semantic Text Recognition": [
45,
50,
0.9,
{
"Default": [
45,
50,
0.9
]
}
],
"Regular Text Recognition": [
49,
50,
0.98,
{
"Default": [
49,
50,
0.98
]
}
],
"acc_stderr": 0,
"acc": 75.852
},
"OcrliteZh": {
"final_score": [
161,
234
],
"accuracy": 68.803,
"Docvqa": [
6,
10,
0.6,
{
"Default": [
6,
10,
0.6
]
}
],
"Chartqa-human": [
4,
10,
0.4,
{
"Default": [
4,
10,
0.4
]
}
],
"Chartqa-au": [
7,
10,
0.7,
{
"Default": [
7,
10,
0.7
]
}
],
"infographic": [
7,
10,
0.7,
{
"Default": [
7,
10,
0.7
]
}
],
"Key Information Extraction": [
39,
45,
0.867,
{
"Default": [
39,
45,
0.867
]
}
],
"Scene Text-centric VQA": [
28,
40,
0.7,
{
"Default": [
28,
40,
0.7
]
}
],
"Artistic Text Recognition": [
7,
11,
0.636,
{
"Default": [
7,
11,
0.636
]
}
],
"IrRegular Text Recognition": [
8,
11,
0.727,
{
"Default": [
8,
11,
0.727
]
}
],
"Non-semantic Text Recognition": [
10,
12,
0.833,
{
"Default": [
10,
12,
0.833
]
}
],
"Regular Text Recognition": [
10,
11,
0.909,
{
"Default": [
10,
11,
0.909
]
}
],
"Handwriting_CN": [
16,
20,
0.8,
{
"Default": [
16,
20,
0.8
]
}
],
"Chinese Unlimited": [
19,
44,
0.432,
{
"Default": [
19,
44,
0.432
]
}
],
"acc_stderr": 0,
"acc": 68.803
},
"CharXiv": {
"descriptive": {
"Overall Score": 38.55,
"By Question": {
"Q1": 83.2,
"Q2": 77.83,
"Q3": 69.1,
"Q4": 85.99,
"Q5": 81.17,
"Q6": 75.1,
"Q7": 76.5,
"Q8": 62.95,
"Q9": 38.31,
"Q10": 0.0,
"Q11": 0.0,
"Q12": 0.0,
"Q13": 0.0,
"Q14": 0.0,
"Q15": 0.0,
"Q16": 0.0,
"Q17": 0.0,
"Q18": 0.0,
"Q19": 0.0
},
"By Category": {
"Information Extraction": 78.53,
"Enumeration": 17.59,
"Pattern Recognition": 0.0,
"Counting": 0.0,
"Compositionality": 0.0
},
"By Subplot": {
"1 Subplot": 41.77,
"2-4 Subplots": 37.1,
"5+ Subplots": 35.59
},
"By Subject": {
"Computer Science": 38.49,
"Economics": 38.59,
"Electrical Engineering and Systems Science": 44.12,
"Mathematics": 37.04,
"Physics": 38.39,
"Quantitative Biology": 31.94,
"Quantitative Finance": 38.36,
"Statistics": 42.26
},
"By Year": {
"2020": 38.46,
"2021": 39.08,
"2022": 39.34,
"2023": 37.3
},
"N_valid": 4000,
"N_invalid": 1975,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 37.7,
"By Answer Type": {
"Text-in-Chart": 38.18,
"Text-in-General": 41.41,
"Number-in-Chart": 40.52,
"Number-in-General": 32.31
},
"By Source": {
"GPT-Sourced": 40.22,
"GPT-Inspired": 37.96,
"Completely Human": 36.83
},
"By Subject": {
"Computer Science": 26.19,
"Economics": 39.13,
"Electrical Engineering and Systems Science": 39.5,
"Mathematics": 41.48,
"Physics": 42.52,
"Quantitative Biology": 39.68,
"Quantitative Finance": 36.21,
"Statistics": 36.28
},
"By Year": {
"2020": 30.77,
"2021": 41.76,
"2022": 36.89,
"2023": 41.13
},
"By Subplot": {
"1 Subplot": 39.9,
"2-4 Subplots": 37.04,
"5+ Subplots": 35.17
},
"N_valid": 1000,
"N_invalid": 34,
"Question Type": "Reasoning"
},
"accuracy": 38.12,
"acc_stderr": 0,
"acc": 38.12
},
"MathVision": {
"accuracy": 18.65,
"acc_stderr": 0,
"acc": 18.65
},
"CII-Bench": {
"accuracy": 48.89,
"domain_score": {
"CTC": 52.59,
"Society": 50.81,
"Life": 41.56,
"Art": 47.79,
"Env.": 61.11,
"Politics": 62.5
},
"emotion_score": {
"Positive": 49.57,
"Negative": 48.3,
"Neutral": 48.87
},
"acc_stderr": 0,
"acc": 48.89
},
"Blink": {
"accuracy": 56.08,
"Art Style": 59.83,
"Counting": 63.33,
"Forensic Detection": 58.33,
"Functional Correspondence": 31.54,
"IQ Test": 24.0,
"Jigsaw": 59.33,
"Multi-view Reasoning": 54.89,
"Object Localization": 55.74,
"Relative Depth": 76.61,
"Relative Reflectance": 27.61,
"Semantic Correspondence": 39.57,
"Spatial Relation": 80.42,
"Visual Correspondence": 68.02,
"Visual Similarity": 86.67,
"acc_stderr": 0,
"acc": 56.08
}
}
}
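
For anyone consuming this file programmatically, here is a minimal sketch in Python, assuming the JSON above is saved locally under its upload name (adjust the path to your copy). It prints the top-level "acc" reported for each benchmark, re-derives the CMMMU category accuracies from the raw num/correct counts, and decodes the [correct, total, fraction, breakdown] entries used by the OCR benchmarks.

import json

# Load the results file shown above (filename taken from this upload;
# point it at wherever your local copy lives).
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    report = json.load(f)

print("Model:", report["config_general"]["model_name"])

# Every benchmark block carries a top-level "acc" field.
for benchmark, scores in report["results"].items():
    print(f"{benchmark:20s} acc = {scores['acc']:.2f}")

# Sanity check: each CMMMU category's accuracy is 100 * correct / num,
# rounded to two decimals. Category entries are dicts with a "num" key;
# the sibling scalar fields (accuracy, acc, acc_stderr) are skipped.
for category, detail in report["results"]["CMMMU"].items():
    if isinstance(detail, dict) and "num" in detail:
        recomputed = 100 * detail["correct"] / detail["num"]
        assert abs(recomputed - detail["accuracy"]) < 0.01, category

# The OCR benchmarks store each task as [correct, total, fraction, breakdown];
# Ocrlite's "final_score" keeps only [correct, total], so its reported 75.852
# accuracy is just 100 * 1247 / 1644.
correct, total = report["results"]["Ocrlite"]["final_score"]
print(f"Ocrlite recomputed: {100 * correct / total:.3f}")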