vlm_results / Qwen2-VL-2B-Instruct / results_2025-01-25T10-42-53.190540.json
{
  "config_general": {
    "model_name": "Qwen2-VL-2B-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
"CMMMU": {
"accuracy": 33.89,
"acc_stderr": 0,
"acc": 33.89
},
"MMMU": {
"accuracy": 41.44,
"acc_stderr": 0,
"acc": 41.44
},
"MMMU_Pro_standard": {
"accuracy": 26.82,
"acc_stderr": 0,
"acc": 26.82
},
"MMMU_Pro_vision": {
"accuracy": 16.47,
"subject_score": {
"Sociology": 22.22,
"History": 16.07,
"Agriculture": 15.0,
"Design": 26.67,
"Energy_and_Power": 3.45,
"Finance": 8.33,
"Literature": 51.92,
"Pharmacy": 24.56,
"Accounting": 10.34,
"Architecture_and_Engineering": 8.33,
"Clinical_Medicine": 11.86,
"Art": 24.53,
"Art_Theory": 21.82,
"Public_Health": 6.9,
"Electronics": 13.33,
"Physics": 13.33,
"Psychology": 13.33,
"Manage": 26.0,
"Biology": 22.03,
"Mechanical_Engineering": 16.95,
"Economics": 11.86,
"Basic_Medical_Science": 15.38,
"Computer_Science": 25.0,
"Diagnostics_and_Laboratory_Medicine": 15.0,
"Math": 10.0,
"Materials": 16.67,
"Music": 11.67,
"Marketing": 10.17,
"Chemistry": 16.67,
"Geography": 11.54
},
"acc_stderr": 0,
"acc": 16.47
},
"MmvetV2": {
"accuracy": 45.2998,
"capability_scores": {
"ocr": 47.5,
"math": 30.58823529411765,
"spat": 41.065989847715734,
"rec": 43.05825242718446,
"know": 44.29487179487178,
"gen": 41.34545454545452,
"seq": 34.82142857142858
},
"capability_detail_scores": {
"ocr_math": 31.818181818181817,
"ocr_math_spat": 33.33333333333333,
"ocr_math_spat_rec": 25.0,
"spat_rec": 56.07142857142857,
"ocr_spat": 55.769230769230774,
"ocr_spat_rec": 21.666666666666668,
"ocr_know_spat": 75.0,
"ocr_rec": 65.0,
"know_spat_rec": 18.0,
"ocr": 70.93750000000001,
"rec": 55.08474576271186,
"know_rec": 41.53846153846154,
"gen_know_rec": 45.099999999999994,
"ocr_gen_know_rec": 43.84615384615385,
"ocr_gen_spat_rec": 38.372093023255815,
"ocr_spat_gen": 60.0,
"math_gen_seq_ocr_spat": 0.0,
"math_rec_seq_ocr_spat": 0.0,
"gen_spat_rec": 30.909090909090903,
"ocr_math_spat_gen": 40.0,
"seq_spat_rec": 40.0,
"seq_ocr_spat_rec": 33.33333333333333,
"gen_know_spat_rec": 20.0,
"gen_rec": 42.94117647058823,
"ocr_know_spat_rec": 87.5,
"gen_know_ocr_rec": 43.84615384615385,
"gen_rec_spat_ocr_know": 65.0,
"ocr_math_rec": 100.0,
"ocr_gen_rec": 34.0,
"seq_ocr_gen_rec": 52.85714285714285,
"ocr_gen": 50.0,
"seq_gen_rec": 22.857142857142858,
"seq_rec": 40.0,
"seq_gen_spat_rec": 42.5,
"seq_know_rec": 0.0,
"seq_gen_know_rec": 55.00000000000001,
"gen_rec_seq_ocr_spat": 10.0,
"gen_rec_seq_ocr_know": 80.0,
"math_know_rec": 0.0,
"seq_ocr_rec": 0.0
},
"acc_stderr": 0,
"acc": 45.2998
},
"MathVerse": {
"Text Lite": {
"accuracy": 21.32,
"correct": 168,
"total": 788
},
"Total": {
"accuracy": 20.94,
"correct": 825,
"total": 3940
},
"Text Dominant": {
"accuracy": 23.73,
"correct": 187,
"total": 788
},
"Vision Intensive": {
"accuracy": 20.56,
"correct": 162,
"total": 788
},
"Vision Only": {
"accuracy": 18.78,
"correct": 148,
"total": 788
},
"Vision Dominant": {
"accuracy": 20.3,
"correct": 160,
"total": 788
},
"accuracy": 20.94,
"acc_stderr": 0,
"acc": 20.94
},
"Ocrlite": {
"final_score": [
1123,
1645
],
"accuracy": 68.267,
"Key Information Extraction-Bookshelf": [
23,
52
],
"Scene Text-centric VQA-diet_constraints": [
27,
90
],
"Doc-oriented VQA-Control": [
111,
189
],
"Doc-oriented VQA": [
128,
204
],
"Scene Text-centric VQA-Fake_logo": [
47,
119
],
"Handwritten Mathematical Expression Recognition": [
28,
100
],
"Key Information Extraction": [
190,
209
],
"Scene Text-centric VQA-Control": [
166,
200
],
"Scene Text-centric VQA": [
229,
282
],
"Artistic Text Recognition": [
43,
50
],
"Irregular Text Recognition": [
47,
50
],
"Non-Semantic Text Recognition": [
38,
50
],
"Regular Text Recognition": [
46,
50
],
"acc_stderr": 0,
"acc": 68.267
},
"OcrliteZh": {
"final_score": [
143,
234
],
"accuracy": 61.111,
"Docvqa": [
8,
10
],
"Chartqa-human": [
5,
10
],
"Chartqa-au": [
6,
10
],
"infographic": [
4,
10
],
"Key Information Extraction": [
33,
45
],
"Scene Text-centric VQA": [
27,
40
],
"Artistic Text Recognition": [
6,
11
],
"IrRegular Text Recognition": [
8,
11
],
"Non-semantic Text Recognition": [
10,
12
],
"Regular Text Recognition": [
11,
11
],
"Handwriting_CN": [
12,
20
],
"Chinese Unlimited": [
13,
44
],
"acc_stderr": 0,
"acc": 61.111
},
"CharXiv": {
"descriptive": {
"Overall Score": 45.25,
"By Question": {
"Q1": 66.8,
"Q2": 54.78,
"Q3": 49.79,
"Q4": 65.37,
"Q5": 55.23,
"Q6": 38.55,
"Q7": 22.22,
"Q8": 43.3,
"Q9": 29.35,
"Q10": 32.88,
"Q11": 24.57,
"Q12": 31.87,
"Q13": 45.66,
"Q14": 73.76,
"Q15": 50.8,
"Q16": 38.89,
"Q17": 6.25,
"Q18": 44.13,
"Q19": 73.85
},
"By Category": {
"Information Extraction": 50.59,
"Enumeration": 50.28,
"Pattern Recognition": 36.24,
"Counting": 39.19,
"Compositionality": 6.25
},
"By Subplot": {
"1 Subplot": 52.91,
"2-4 Subplots": 42.66,
"5+ Subplots": 36.86
},
"By Subject": {
"Computer Science": 41.67,
"Economics": 44.2,
"Electrical Engineering and Systems Science": 50.84,
"Mathematics": 48.52,
"Physics": 45.47,
"Quantitative Biology": 38.69,
"Quantitative Finance": 47.63,
"Statistics": 45.35
},
"By Year": {
"2020": 46.15,
"2021": 43.58,
"2022": 46.93,
"2023": 44.46
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 23.9,
"By Answer Type": {
"Text-in-Chart": 24.77,
"Text-in-General": 28.28,
"Number-in-Chart": 25.0,
"Number-in-General": 19.21
},
"By Source": {
"GPT-Sourced": 27.17,
"GPT-Inspired": 19.44,
"Completely Human": 24.5
},
"By Subject": {
"Computer Science": 28.57,
"Economics": 17.39,
"Electrical Engineering and Systems Science": 27.73,
"Mathematics": 25.19,
"Physics": 24.41,
"Quantitative Biology": 20.63,
"Quantitative Finance": 22.41,
"Statistics": 25.66
},
"By Year": {
"2020": 24.7,
"2021": 24.14,
"2022": 26.23,
"2023": 20.56
},
"By Subplot": {
"1 Subplot": 25.65,
"2-4 Subplots": 25.93,
"5+ Subplots": 17.8
},
"N_valid": 1000,
"N_invalid": 1,
"Question Type": "Reasoning"
},
"accuracy": 34.58,
"acc_stderr": 0,
"acc": 34.58
},
"MathVision": {
"accuracy": 14.11,
"acc_stderr": 0,
"acc": 14.11
},
"CII-Bench": {
"accuracy": 39.48,
"domain_score": {
"Art": 42.65,
"Env.": 50.0,
"CTC": 46.67,
"Society": 40.0,
"Life": 31.17,
"Politics": 33.33
},
"emotion_score": {
"Negative": 38.49,
"Positive": 39.74,
"Neutral": 40.23
},
"acc_stderr": 0,
"acc": 39.48
},
"Blink": {
"accuracy": 40.45,
"Art Style": 47.01,
"Counting": 55.83,
"Forensic Detection": 21.21,
"Functional Correspondence": 32.31,
"IQ Test": 16.0,
"Jigsaw": 52.67,
"Multi-view Reasoning": 33.08,
"Object Localization": 50.0,
"Relative Depth": 55.65,
"Relative Reflectance": 31.34,
"Semantic Correspondence": 24.46,
"Spatial Relation": 69.93,
"Visual Correspondence": 28.49,
"Visual Similarity": 55.56,
"acc_stderr": 0,
"acc": 40.45
}
}
}
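
A minimal sketch for consuming this file, assuming only the Python standard library and that the JSON above is saved locally under the filename shown in the path (an assumption; adjust to wherever you downloaded it). Every benchmark entry under "results" exposes a flat "acc" field next to its benchmark-specific breakdowns, so a one-loop summary suffices:

import json

# Local filename assumed to match the repo path shown above.
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    data = json.load(f)

print("Model:", data["config_general"]["model_name"])

# Each benchmark entry ("CMMMU", "MMMU", ..., "Blink") carries a top-level
# "acc" field alongside its per-subject or per-capability breakdowns.
for benchmark, scores in data["results"].items():
    print(f"{benchmark:<20} acc = {scores['acc']}")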