vlm_results / Qwen2-VL-7B-Instruct / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Qwen2-VL-7B-Instruct",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 44.78,
"acc_stderr": 0,
"acc": 44.78
},
"MMMU": {
"accuracy": 50.44,
"acc_stderr": 0,
"acc": 50.44
},
"MMMU_Pro_standard": {
"accuracy": 33.93,
"acc_stderr": 0,
"acc": 33.93
},
"MMMU_Pro_vision": {
"accuracy": 25.38,
"subject_score": {
"Design": 50.0,
"History": 33.93,
"Agriculture": 11.67,
"Sociology": 27.78,
"Finance": 15.0,
"Art": 39.62,
"Pharmacy": 24.56,
"Accounting": 8.62,
"Energy_and_Power": 22.41,
"Literature": 57.69,
"Clinical_Medicine": 13.56,
"Architecture_and_Engineering": 23.33,
"Art_Theory": 41.82,
"Public_Health": 12.07,
"Electronics": 28.33,
"Physics": 20.0,
"Psychology": 21.67,
"Biology": 28.81,
"Manage": 26.0,
"Economics": 22.03,
"Mechanical_Engineering": 32.2,
"Diagnostics_and_Laboratory_Medicine": 28.33,
"Basic_Medical_Science": 28.85,
"Computer_Science": 31.67,
"Math": 20.0,
"Music": 25.0,
"Materials": 15.0,
"Chemistry": 18.33,
"Marketing": 15.25,
"Geography": 25.0
},
"acc_stderr": 0,
"acc": 25.38
},
"MmvetV2": {
"accuracy": 56.8665,
"capability_scores": {
"ocr": 63.317307692307715,
"math": 62.35294117647059,
"spat": 53.401015228426395,
"rec": 52.66990291262139,
"know": 49.87179487179489,
"gen": 52.727272727272755,
"seq": 48.214285714285715
},
"capability_detail_scores": {
"ocr_math": 80.9090909090909,
"ocr_spat_math": 60.0,
"ocr_rec_spat_math": 50.0,
"rec_spat": 60.357142857142854,
"ocr_spat": 66.15384615384616,
"ocr_rec_spat": 40.833333333333336,
"ocr_know_spat": 87.5,
"ocr_rec": 62.5,
"rec_know_spat": 35.0,
"ocr": 80.0,
"rec": 64.91525423728812,
"rec_know": 42.30769230769231,
"rec_know_gen": 51.800000000000004,
"ocr_rec_know_gen": 63.84615384615382,
"ocr_rec_spat_gen": 55.58139534883719,
"ocr_spat_gen": 80.0,
"ocr_seq_spat_math_gen": 80.0,
"ocr_seq_spat_rec_math": 0.0,
"rec_spat_gen": 43.18181818181818,
"ocr_spat_math_gen": 50.0,
"rec_spat_seq": 42.85714285714287,
"ocr_rec_spat_seq": 50.0,
"rec_know_spat_gen": 10.0,
"rec_gen": 53.8235294117647,
"ocr_rec_spat_know": 25.0,
"ocr_spat_know": 87.5,
"ocr_spat_rec_know_gen": 75.0,
"ocr_rec_math": 100.0,
"ocr_rec_gen": 51.99999999999999,
"ocr_rec_gen_seq": 59.999999999999986,
"ocr_gen": 76.15384615384616,
"rec_gen_seq": 34.285714285714285,
"rec_seq": 61.66666666666666,
"rec_spat_gen_seq": 65.0,
"rec_know_seq": 100.0,
"rec_know_gen_seq": 25.0,
"ocr_seq_spat_rec_gen": 13.333333333333334,
"ocr_seq_rec_know_gen": 45.0,
"rec_know_math": 0.0,
"ocr_rec_seq": 100.0,
"rec_spat_know": 35.0
},
"acc_stderr": 0,
"acc": 56.8665
},
"MathVerse": {
"Text Dominant": {
"accuracy": 30.84,
"correct": 243,
"total": 788
},
"Total": {
"accuracy": 27.77,
"correct": 1094,
"total": 3940
},
"Text Lite": {
"accuracy": 29.19,
"correct": 230,
"total": 788
},
"Vision Intensive": {
"accuracy": 28.55,
"correct": 225,
"total": 788
},
"Vision Dominant": {
"accuracy": 28.3,
"correct": 223,
"total": 788
},
"Vision Only": {
"accuracy": 21.95,
"correct": 173,
"total": 788
},
"accuracy": 27.77,
"acc_stderr": 0,
"acc": 27.77
},
"Ocrlite": {
"final_score": [
1219,
1645
],
"accuracy": 74.103,
"Key Information Extraction-Bookshelf": [
24,
52
],
"Scene Text-centric VQA-diet_constraints": [
54,
90
],
"Doc-oriented VQA-Control": [
123,
189
],
"Doc-oriented VQA": [
149,
204
],
"Scene Text-centric VQA-Fake_logo": [
53,
119
],
"Handwritten Mathematical Expression Recognition": [
39,
100
],
"Key Information Extraction": [
195,
209
],
"Scene Text-centric VQA-Control": [
166,
200
],
"Scene Text-centric VQA": [
241,
282
],
"Artistic Text Recognition": [
43,
50
],
"Irregular Text Recognition": [
46,
50
],
"Non-Semantic Text Recognition": [
38,
50
],
"Regular Text Recognition": [
48,
50
],
"acc_stderr": 0,
"acc": 74.103
},
"OcrliteZh": {
"final_score": [
144,
234
],
"accuracy": 61.538,
"Docvqa": [
8,
10
],
"Chartqa-human": [
5,
10
],
"Chartqa-au": [
6,
10
],
"infographic": [
5,
10
],
"Key Information Extraction": [
34,
45
],
"Scene Text-centric VQA": [
28,
40
],
"Artistic Text Recognition": [
3,
11
],
"IrRegular Text Recognition": [
8,
11
],
"Non-semantic Text Recognition": [
9,
12
],
"Regular Text Recognition": [
10,
11
],
"Handwriting_CN": [
14,
20
],
"Chinese Unlimited": [
14,
44
],
"acc_stderr": 0,
"acc": 61.538
},
"CharXiv": {
"descriptive": {
"Overall Score": 58.55,
"By Question": {
"Q1": 76.64,
"Q2": 81.3,
"Q3": 69.96,
"Q4": 85.21,
"Q5": 81.17,
"Q6": 59.44,
"Q7": 65.38,
"Q8": 59.38,
"Q9": 59.7,
"Q10": 74.66,
"Q11": 29.71,
"Q12": 46.7,
"Q13": 64.84,
"Q14": 33.69,
"Q15": 30.03,
"Q16": 52.78,
"Q17": 8.93,
"Q18": 67.61,
"Q19": 84.62
},
"By Category": {
"Information Extraction": 74.2,
"Enumeration": 47.13,
"Pattern Recognition": 51.97,
"Counting": 63.36,
"Compositionality": 8.93
},
"By Subplot": {
"1 Subplot": 66.9,
"2-4 Subplots": 56.68,
"5+ Subplots": 47.88
},
"By Subject": {
"Computer Science": 58.13,
"Economics": 60.87,
"Electrical Engineering and Systems Science": 61.34,
"Mathematics": 61.11,
"Physics": 54.53,
"Quantitative Biology": 52.38,
"Quantitative Finance": 61.64,
"Statistics": 58.41
},
"By Year": {
"2020": 56.48,
"2021": 57.95,
"2022": 60.86,
"2023": 58.97
},
"N_valid": 4000,
"N_invalid": 5,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 33.5,
"By Answer Type": {
"Text-in-Chart": 35.45,
"Text-in-General": 39.39,
"Number-in-Chart": 34.05,
"Number-in-General": 26.64
},
"By Source": {
"GPT-Sourced": 38.04,
"GPT-Inspired": 27.78,
"Completely Human": 34.17
},
"By Subject": {
"Computer Science": 28.57,
"Economics": 32.61,
"Electrical Engineering and Systems Science": 42.02,
"Mathematics": 35.56,
"Physics": 40.16,
"Quantitative Biology": 34.13,
"Quantitative Finance": 26.72,
"Statistics": 27.43
},
"By Year": {
"2020": 33.2,
"2021": 36.78,
"2022": 30.33,
"2023": 33.47
},
"By Subplot": {
"1 Subplot": 36.01,
"2-4 Subplots": 30.95,
"5+ Subplots": 33.47
},
"N_valid": 1000,
"N_invalid": 2,
"Question Type": "Reasoning"
},
"accuracy": 46.02,
"acc_stderr": 0,
"acc": 46.02
},
"MathVision": {
"accuracy": 17.04,
"acc_stderr": 0,
"acc": 17.04
},
"CII-Bench": {
"accuracy": 51.24,
"domain_score": {
"Life": 41.99,
"Art": 55.15,
"CTC": 51.85,
"Society": 51.89,
"Env.": 74.07,
"Politics": 58.33
},
"emotion_score": {
"Neutral": 53.76,
"Negative": 48.68,
"Positive": 51.28
},
"acc_stderr": 0,
"acc": 51.24
},
"Blink": {
"accuracy": 50.08,
"Art Style": 59.83,
"Counting": 69.17,
"Forensic Detection": 30.3,
"Functional Correspondence": 21.54,
"IQ Test": 28.0,
"Jigsaw": 49.33,
"Multi-view Reasoning": 42.86,
"Object Localization": 54.92,
"Relative Depth": 67.74,
"Relative Reflectance": 39.55,
"Semantic Correspondence": 31.65,
"Spatial Relation": 83.92,
"Visual Correspondence": 44.19,
"Visual Similarity": 84.44,
"acc_stderr": 0,
"acc": 50.08
}
}
}
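
For working with this results file programmatically, a minimal Python sketch is shown below. It assumes the JSON above has been saved locally (the RESULTS_PATH filename is only an example) and walks the structure exactly as shown here: a "config_general" block plus a "results" map in which every benchmark entry carries a top-level "accuracy" field alongside its benchmark-specific breakdowns.

import json

# Assumed local filename; adjust to wherever this file was downloaded.
RESULTS_PATH = "results_2025-01-25T10-42-53.190540.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

# Model metadata lives under "config_general".
print("Model:", data["config_general"]["model_name"])

# Each benchmark entry (CMMMU, MMMU, MmvetV2, MathVerse, ...) exposes a
# headline "accuracy" value; finer-grained scores (subject_score,
# capability_scores, per-split counts) sit in benchmark-specific keys.
for benchmark, scores in data["results"].items():
    print(f"{benchmark:>20}: {scores['accuracy']:.2f}")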