daiteng01 committed · Commit 7498b1c · verified · 1 parent: dda04eb

Upload 52 files

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. Aria/results_2025-01-25T10-42-53.190540.json +36 -4
  2. Claude-3.5-Sonnet-20241022/results_2025-01-25T10-42-53.190540.json +31 -31
  3. Claude-3.7-Sonnet-20250219/results_2025-01-25T10-42-53.190540.json +742 -0
  4. Claude3-Opus-20240229/results_2025-01-25T10-42-53.190540.json +36 -4
  5. Doubao-Pro-Vision-32k-241028/results_2025-01-25T10-42-53.190540.json +33 -33
  6. GLM-4V-Plus/results_2025-01-25T10-42-53.190540.json +36 -4
  7. GPT-4o-20240806/results_2025-01-25T10-42-53.190540.json +36 -4
  8. GPT-4o-20241120/results_2025-01-25T10-42-53.190540.json +36 -4
  9. GPT-4o-mini-20240718/results_2025-01-25T10-42-53.190540.json +36 -4
  10. Gemini-1.5-Flash/results_2025-01-25T10-42-53.190540.json +36 -4
  11. Gemini-1.5-Pro/results_2025-01-25T10-42-53.190540.json +36 -4
  12. Gemini-2.0-Flash(experimental)/results_2025-01-25T10-42-53.190540.json +723 -0
  13. Gemini-2.0-pro-exp-20250205/results_2025-01-25T10-42-53.190540.json +758 -0
  14. Gemini-2.5-pro-preview-20250325/results_2025-01-25T10-42-53.190540.json +757 -0
  15. Gemma-3-27b-it/results_2025-01-25T10-42-53.190540.json +722 -0
  16. Idefics3-8B-Llama3/results_2025-01-25T10-42-53.190540.json +36 -4
  17. InternVL2-2B/results_2025-01-25T10-42-53.190540.json +36 -4
  18. InternVL2-8B/results_2025-01-25T10-42-53.190540.json +36 -4
  19. InternVL2-Llama3-76B/results_2025-01-25T10-42-53.190540.json +36 -4
  20. InternVL2_5-26B/results_2025-01-25T10-42-53.190540.json +518 -0
  21. InternVL2_5-3B/results_2025-01-25T10-42-53.190540.json +720 -0
  22. InternVL2_5-8B/results_2025-01-25T10-42-53.190540.json +519 -0
  23. InternVL3-78B/results_2025-01-25T10-42-53.190540.json +717 -0
  24. InternVL3-8B/results_2025-01-25T10-42-53.190540.json +718 -0
  25. Janus-1.3B/results_2025-01-25T10-42-53.190540.json +36 -4
  26. LLaVA-OneVision-0.5B/results_2025-01-25T10-42-53.190540.json +6 -6
  27. LLaVA-OneVision-7B/results_2025-01-25T10-42-53.190540.json +34 -2
  28. LLaVA-Onevision-72B/results_2025-01-25T10-42-53.190540.json +36 -4
  29. Llama-3.2-11B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +36 -4
  30. Llama-3.2-90B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +36 -4
  31. Llama-4-maverick-instruct-basic/results_2025-01-25T10-42-53.190540.json +730 -0
  32. MiniCPM-V-2.6/results_2025-01-25T10-42-53.190540.json +34 -2
  33. Mistral-Small-3.1-24B-Instruct-2503/results_2025-01-25T10-42-53.190540.json +723 -0
  34. Molmo-72B-0924/results_2025-01-25T10-42-53.190540.json +36 -4
  35. Molmo-7B-D/results_2025-01-25T10-42-53.190540.json +34 -2
  36. Mono-InternVL-2B/results_2025-01-25T10-42-53.190540.json +36 -4
  37. NVLM-D-72B/results_2025-01-25T10-42-53.190540.json +36 -4
  38. Phi-3.5-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +36 -4
  39. Phi-4-multimodal-instruct/results_2025-01-25T10-42-53.190540.json +685 -0
  40. Pixtral-12B-2409/results_2025-01-25T10-42-53.190540.json +36 -4
  41. Pixtral-Large-Instruct-2411/results_2025-01-25T10-42-53.190540.json +517 -0
  42. Qwen-VL-Max-20250402/results_2025-01-25T10-42-53.190540.json +762 -0
  43. Qwen-VL-Max/results_2025-01-25T10-42-53.190540.json +36 -4
  44. Qwen2-VL-2B-Instruct/results_2025-01-25T10-42-53.190540.json +410 -0
  45. Qwen2-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json +36 -4
  46. Qwen2-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json +36 -4
  47. Qwen2.5-VL-32B-Instruct/results_2025-01-25T10-42-53.190540.json +717 -0
  48. Qwen2.5-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json +723 -0
  49. Qwen2.5-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json +722 -0
  50. Step-1V-32k/results_2025-01-25T10-42-53.190540.json +36 -4
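Every file in this list shares the same layout: a `config_general` block naming the model and a `results` block keyed by benchmark, where each benchmark stores `accuracy`/`acc` plus optional breakdowns such as `subject_score` or `capability_scores`. The sketch below shows one way such a file could be read; the path and field names come from the diffs in this commit, but the script itself is only illustrative and is not part of the repository.

```python
import json
from pathlib import Path

# Illustrative only: load one per-model results file from this dataset
# (the path assumes the repository layout shown in the file list above).
path = Path("Aria/results_2025-01-25T10-42-53.190540.json")
data = json.loads(path.read_text(encoding="utf-8"))

model = data["config_general"]["model_name"]
results = data["results"]

# Print the top-level accuracy reported for each benchmark.
for benchmark, entry in results.items():
    if isinstance(entry, dict) and "acc" in entry:
        print(f"{model} | {benchmark}: {entry['acc']}")
```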
Aria/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
       "acc": 32.08
     },
     "MMMU_Pro_vision": {
-      "accuracy": 52.02,
+      "accuracy": 21.21,
+      "subject_score": {
+        "History": 25.0,
+        "Art": 26.42,
+        "Design": 41.67,
+        "Literature": 55.77,
+        "Agriculture": 25.0,
+        "Finance": 16.67,
+        "Sociology": 22.22,
+        "Accounting": 25.86,
+        "Energy_and_Power": 13.79,
+        "Pharmacy": 31.58,
+        "Architecture_and_Engineering": 10.0,
+        "Clinical_Medicine": 16.95,
+        "Public_Health": 25.86,
+        "Physics": 16.67,
+        "Art_Theory": 21.82,
+        "Electronics": 13.33,
+        "Psychology": 25.0,
+        "Biology": 20.34,
+        "Manage": 12.0,
+        "Economics": 22.03,
+        "Mechanical_Engineering": 20.34,
+        "Diagnostics_and_Laboratory_Medicine": 10.0,
+        "Basic_Medical_Science": 23.08,
+        "Computer_Science": 23.33,
+        "Math": 23.33,
+        "Music": 20.0,
+        "Materials": 8.33,
+        "Marketing": 15.25,
+        "Chemistry": 15.0,
+        "Geography": 13.46
+      },
       "acc_stderr": 0,
-      "acc": 52.02
+      "acc": 21.21
     },
     "MmvetV2": {
       "accuracy": 55.8414,
@@ -333,9 +365,9 @@
       "acc": 43.27
     },
     "MathVision": {
-      "accuracy": 15.56,
+      "accuracy": 13.59,
       "acc_stderr": 0,
-      "acc": 15.56
+      "acc": 13.59
     },
     "CII-Bench": {
       "accuracy": 46.27,
Claude-3.5-Sonnet-20241022/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -129,41 +129,41 @@
       "acc": 42.14
     },
     "MMMU_Pro_vision": {
-      "accuracy": 56.65,
+      "accuracy": 47.63,
       "subject_score": {
-        "History": 62.5,
-        "Art": 50.94,
+        "History": 53.57,
+        "Art": 60.38,
         "Design": 65.0,
-        "Literature": 63.46,
-        "Agriculture": 61.67,
-        "Finance": 46.67,
-        "Sociology": 66.67,
-        "Accounting": 65.52,
-        "Energy_and_Power": 46.55,
-        "Pharmacy": 54.39,
-        "Architecture_and_Engineering": 38.33,
-        "Clinical_Medicine": 49.15,
-        "Public_Health": 68.97,
-        "Physics": 58.33,
-        "Art_Theory": 56.36,
-        "Electronics": 51.67,
-        "Psychology": 78.33,
-        "Biology": 61.02,
-        "Manage": 58.0,
-        "Economics": 57.63,
-        "Mechanical_Engineering": 55.93,
-        "Diagnostics_and_Laboratory_Medicine": 58.33,
-        "Basic_Medical_Science": 59.62,
-        "Computer_Science": 66.67,
-        "Math": 60.0,
-        "Music": 53.33,
-        "Materials": 40.0,
-        "Marketing": 42.37,
-        "Chemistry": 55.0,
-        "Geography": 48.08
+        "Literature": 51.92,
+        "Agriculture": 31.67,
+        "Finance": 68.33,
+        "Sociology": 50.0,
+        "Accounting": 72.41,
+        "Energy_and_Power": 34.48,
+        "Pharmacy": 52.63,
+        "Architecture_and_Engineering": 35.0,
+        "Clinical_Medicine": 38.98,
+        "Public_Health": 65.52,
+        "Physics": 41.67,
+        "Art_Theory": 61.82,
+        "Electronics": 60.0,
+        "Psychology": 40.0,
+        "Biology": 35.59,
+        "Manage": 54.0,
+        "Economics": 62.71,
+        "Mechanical_Engineering": 33.9,
+        "Diagnostics_and_Laboratory_Medicine": 31.67,
+        "Basic_Medical_Science": 42.31,
+        "Computer_Science": 41.67,
+        "Math": 38.33,
+        "Music": 30.0,
+        "Materials": 30.0,
+        "Marketing": 57.63,
+        "Chemistry": 51.67,
+        "Geography": 40.38
       },
       "acc_stderr": 0,
-      "acc": 56.65
+      "acc": 47.63
     },
     "MmvetV2": {
       "reject_info": {
Claude-3.7-Sonnet-20250219/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,742 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Claude-3.7-Sonnet-20250219",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 63,
12
+ "accuracy": 71.59
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 460,
17
+ "accuracy": 51.11
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 42,
22
+ "accuracy": 33.33
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 88,
27
+ "accuracy": 43.14
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 92,
32
+ "accuracy": 60.13
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 52,
37
+ "accuracy": 61.18
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 123,
42
+ "accuracy": 50.41
43
+ },
44
+ "accuracy": 51.11,
45
+ "acc_stderr": 0,
46
+ "acc": 51.11
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 54.44,
50
+ "subject_score": {
51
+ "Accounting": 43.33,
52
+ "Agriculture": 56.67,
53
+ "Architecture": 33.33,
54
+ "Art": 83.33,
55
+ "Basic": 76.67,
56
+ "Biology": 56.67,
57
+ "Chemistry": 53.33,
58
+ "Clinical": 76.67,
59
+ "Computer": 36.67,
60
+ "Design": 83.33,
61
+ "Diagnostics": 40.0,
62
+ "Economics": 53.33,
63
+ "Electronics": 26.67,
64
+ "Energy": 23.33,
65
+ "Finance": 40.0,
66
+ "Geography": 40.0,
67
+ "History": 70.0,
68
+ "Literature": 86.67,
69
+ "Manage": 66.67,
70
+ "Marketing": 46.67,
71
+ "Materials": 43.33,
72
+ "Math": 16.67,
73
+ "Mechanical": 30.0,
74
+ "Music": 40.0,
75
+ "Pharmacy": 60.0,
76
+ "Physics": 50.0,
77
+ "Psychology": 73.33,
78
+ "Public": 70.0,
79
+ "Sociology": 73.33
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 55.42,
83
+ "Easy": 67.12,
84
+ "Hard": 31.49
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 54.44
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 38.55,
91
+ "subject_score": {
92
+ "History": 48.21,
93
+ "Art": 64.15,
94
+ "Design": 66.67,
95
+ "Literature": 71.15,
96
+ "Agriculture": 35.0,
97
+ "Finance": 33.33,
98
+ "Sociology": 51.85,
99
+ "Accounting": 39.66,
100
+ "Energy_and_Power": 18.97,
101
+ "Pharmacy": 43.86,
102
+ "Clinical_Medicine": 44.07,
103
+ "Architecture_and_Engineering": 18.33,
104
+ "Public_Health": 51.72,
105
+ "Physics": 31.67,
106
+ "Art_Theory": 61.82,
107
+ "Psychology": 45.0,
108
+ "Electronics": 35.0,
109
+ "Biology": 32.2,
110
+ "Economics": 35.59,
111
+ "Manage": 30.0,
112
+ "Mechanical_Engineering": 18.64,
113
+ "Diagnostics_and_Laboratory_Medicine": 31.67,
114
+ "Basic_Medical_Science": 34.62,
115
+ "Computer_Science": 41.67,
116
+ "Math": 21.67,
117
+ "Music": 21.67,
118
+ "Materials": 26.67,
119
+ "Marketing": 35.59,
120
+ "Chemistry": 36.67,
121
+ "Geography": 38.46
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 36.08,
125
+ "Easy": 50.76,
126
+ "Hard": 27.43
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 38.55
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 46.13,
133
+ "subject_score": {
134
+ "History": 46.43,
135
+ "Art": 60.38,
136
+ "Design": 63.33,
137
+ "Literature": 59.62,
138
+ "Agriculture": 30.0,
139
+ "Sociology": 51.85,
140
+ "Finance": 63.33,
141
+ "Accounting": 74.14,
142
+ "Energy_and_Power": 37.93,
143
+ "Pharmacy": 50.88,
144
+ "Clinical_Medicine": 37.29,
145
+ "Architecture_and_Engineering": 31.67,
146
+ "Public_Health": 60.34,
147
+ "Physics": 50.0,
148
+ "Art_Theory": 67.27,
149
+ "Electronics": 45.0,
150
+ "Psychology": 33.33,
151
+ "Biology": 33.9,
152
+ "Manage": 42.0,
153
+ "Economics": 66.1,
154
+ "Mechanical_Engineering": 37.29,
155
+ "Diagnostics_and_Laboratory_Medicine": 23.33,
156
+ "Basic_Medical_Science": 44.23,
157
+ "Computer_Science": 41.67,
158
+ "Math": 40.0,
159
+ "Music": 23.33,
160
+ "Materials": 35.0,
161
+ "Marketing": 54.24,
162
+ "Chemistry": 48.33,
163
+ "Geography": 36.54
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 46.13
167
+ },
168
+ "MmvetV2": {
169
+ "reject_info": {
170
+ "reject_rate": 2.9,
171
+ "reject_number": 15,
172
+ "total_question": 517
173
+ },
174
+ "accuracy": 68.9044,
175
+ "capability_scores": {
176
+ "math": 72.94117647058825,
177
+ "ocr": 71.9024390243902,
178
+ "spat": 62.526315789473664,
179
+ "rec": 67.20403022670028,
180
+ "know": 64.45945945945944,
181
+ "gen": 69.3233082706767,
182
+ "seq": 72.36363636363635
183
+ },
184
+ "capability_detail_scores": {
185
+ "math_ocr": 81.81818181818183,
186
+ "math_ocr_spat": 73.33333333333333,
187
+ "math_ocr_spat_rec": 45.0,
188
+ "spat_rec": 63.70370370370372,
189
+ "ocr_spat": 71.53846153846153,
190
+ "ocr_spat_rec": 45.0,
191
+ "know_ocr_spat": 75.0,
192
+ "ocr_rec": 95.0,
193
+ "know_spat_rec": 40.0,
194
+ "ocr": 76.875,
195
+ "rec": 73.38983050847457,
196
+ "know_rec": 65.45454545454545,
197
+ "know_rec_gen": 67.23404255319146,
198
+ "know_ocr_rec_gen": 72.3076923076923,
199
+ "ocr_spat_rec_gen": 71.66666666666669,
200
+ "ocr_spat_gen": 85.00000000000001,
201
+ "spat_math_ocr_gen_seq": 80.0,
202
+ "spat_math_ocr_seq_rec": 50.0,
203
+ "spat_rec_gen": 45.49999999999999,
204
+ "math_ocr_spat_gen": 60.0,
205
+ "seq_spat_rec": 60.0,
206
+ "ocr_seq_spat_rec": 50.0,
207
+ "know_spat_rec_gen": 50.0,
208
+ "rec_gen": 73.23529411764707,
209
+ "know_ocr_spat_rec": 25.0,
210
+ "spat_know_ocr_gen_rec": 80.0,
211
+ "math_ocr_rec": 100.0,
212
+ "ocr_rec_gen": 82.0,
213
+ "ocr_seq_rec_gen": 75.71428571428572,
214
+ "ocr_gen": 75.3846153846154,
215
+ "seq_rec_gen": 80.0,
216
+ "seq_rec": 80.0,
217
+ "seq_spat_rec_gen": 75.00000000000001,
218
+ "know_seq_rec": 0.0,
219
+ "know_seq_rec_gen": 75.0,
220
+ "spat_ocr_gen_seq_rec": 53.333333333333336,
221
+ "know_ocr_gen_seq_rec": 100.0,
222
+ "know_math_rec": 50.0,
223
+ "ocr_seq_rec": 100.0
224
+ },
225
+ "acc_stderr": 0,
226
+ "acc": 68.9044
227
+ },
228
+ "MathVerse": {
229
+ "reject_info": {
230
+ "reject_rate": 0.05,
231
+ "reject_number": 2,
232
+ "total_question": 3940
233
+ },
234
+ "Vision Dominant": {
235
+ "accuracy": 50.76,
236
+ "correct": 400,
237
+ "total": 788
238
+ },
239
+ "Total": {
240
+ "accuracy": 53.78,
241
+ "correct": 2118,
242
+ "total": 3938
243
+ },
244
+ "Vision Intensive": {
245
+ "accuracy": 51.9,
246
+ "correct": 409,
247
+ "total": 788
248
+ },
249
+ "Text Lite": {
250
+ "accuracy": 55.33,
251
+ "correct": 436,
252
+ "total": 788
253
+ },
254
+ "Text Dominant": {
255
+ "accuracy": 62.44,
256
+ "correct": 492,
257
+ "total": 788
258
+ },
259
+ "Vision Only": {
260
+ "accuracy": 48.47,
261
+ "correct": 381,
262
+ "total": 786
263
+ },
264
+ "accuracy": 53.78,
265
+ "acc_stderr": 0,
266
+ "acc": 53.78
267
+ },
268
+ "Ocrlite": {
269
+ "final_score": [
270
+ 1247,
271
+ 1644
272
+ ],
273
+ "accuracy": 75.852,
274
+ "Key Information Extraction-Bookshelf": [
275
+ 28,
276
+ 51,
277
+ 0.549,
278
+ {
279
+ "Default": [
280
+ 28,
281
+ 51,
282
+ 0.549
283
+ ]
284
+ }
285
+ ],
286
+ "Scene Text-centric VQA-diet_constraints": [
287
+ 58,
288
+ 90,
289
+ 0.644,
290
+ {
291
+ "Default": [
292
+ 58,
293
+ 90,
294
+ 0.644
295
+ ]
296
+ }
297
+ ],
298
+ "Doc-oriented VQA-Control": [
299
+ 150,
300
+ 189,
301
+ 0.794,
302
+ {
303
+ "Default": [
304
+ 150,
305
+ 189,
306
+ 0.794
307
+ ]
308
+ }
309
+ ],
310
+ "Doc-oriented VQA": [
311
+ 180,
312
+ 204,
313
+ 0.882,
314
+ {
315
+ "Default": [
316
+ 180,
317
+ 204,
318
+ 0.882
319
+ ]
320
+ }
321
+ ],
322
+ "Scene Text-centric VQA-Fake_logo": [
323
+ 64,
324
+ 119,
325
+ 0.538,
326
+ {
327
+ "Default": [
328
+ 64,
329
+ 119,
330
+ 0.538
331
+ ]
332
+ }
333
+ ],
334
+ "Handwritten Mathematical Expression Recognition": [
335
+ 4,
336
+ 100,
337
+ 0.04,
338
+ {
339
+ "Default": [
340
+ 4,
341
+ 100,
342
+ 0.04
343
+ ]
344
+ }
345
+ ],
346
+ "Key Information Extraction": [
347
+ 186,
348
+ 209,
349
+ 0.89,
350
+ {
351
+ "Default": [
352
+ 186,
353
+ 209,
354
+ 0.89
355
+ ]
356
+ }
357
+ ],
358
+ "Scene Text-centric VQA-Control": [
359
+ 159,
360
+ 200,
361
+ 0.795,
362
+ {
363
+ "Default": [
364
+ 159,
365
+ 200,
366
+ 0.795
367
+ ]
368
+ }
369
+ ],
370
+ "Scene Text-centric VQA": [
371
+ 245,
372
+ 282,
373
+ 0.869,
374
+ {
375
+ "Default": [
376
+ 245,
377
+ 282,
378
+ 0.869
379
+ ]
380
+ }
381
+ ],
382
+ "Artistic Text Recognition": [
383
+ 41,
384
+ 50,
385
+ 0.82,
386
+ {
387
+ "Default": [
388
+ 41,
389
+ 50,
390
+ 0.82
391
+ ]
392
+ }
393
+ ],
394
+ "Irregular Text Recognition": [
395
+ 42,
396
+ 50,
397
+ 0.84,
398
+ {
399
+ "Default": [
400
+ 42,
401
+ 50,
402
+ 0.84
403
+ ]
404
+ }
405
+ ],
406
+ "Non-Semantic Text Recognition": [
407
+ 42,
408
+ 50,
409
+ 0.84,
410
+ {
411
+ "Default": [
412
+ 42,
413
+ 50,
414
+ 0.84
415
+ ]
416
+ }
417
+ ],
418
+ "Regular Text Recognition": [
419
+ 48,
420
+ 50,
421
+ 0.96,
422
+ {
423
+ "Default": [
424
+ 48,
425
+ 50,
426
+ 0.96
427
+ ]
428
+ }
429
+ ],
430
+ "acc_stderr": 0,
431
+ "acc": 75.852
432
+ },
433
+ "OcrliteZh": {
434
+ "final_score": [
435
+ 143,
436
+ 234
437
+ ],
438
+ "accuracy": 61.111,
439
+ "Docvqa": [
440
+ 9,
441
+ 10,
442
+ 0.9,
443
+ {
444
+ "Default": [
445
+ 9,
446
+ 10,
447
+ 0.9
448
+ ]
449
+ }
450
+ ],
451
+ "Chartqa-human": [
452
+ 7,
453
+ 10,
454
+ 0.7,
455
+ {
456
+ "Default": [
457
+ 7,
458
+ 10,
459
+ 0.7
460
+ ]
461
+ }
462
+ ],
463
+ "Chartqa-au": [
464
+ 8,
465
+ 10,
466
+ 0.8,
467
+ {
468
+ "Default": [
469
+ 8,
470
+ 10,
471
+ 0.8
472
+ ]
473
+ }
474
+ ],
475
+ "infographic": [
476
+ 6,
477
+ 10,
478
+ 0.6,
479
+ {
480
+ "Default": [
481
+ 6,
482
+ 10,
483
+ 0.6
484
+ ]
485
+ }
486
+ ],
487
+ "Key Information Extraction": [
488
+ 34,
489
+ 45,
490
+ 0.756,
491
+ {
492
+ "Default": [
493
+ 34,
494
+ 45,
495
+ 0.756
496
+ ]
497
+ }
498
+ ],
499
+ "Scene Text-centric VQA": [
500
+ 28,
501
+ 40,
502
+ 0.7,
503
+ {
504
+ "Default": [
505
+ 28,
506
+ 40,
507
+ 0.7
508
+ ]
509
+ }
510
+ ],
511
+ "Artistic Text Recognition": [
512
+ 3,
513
+ 11,
514
+ 0.273,
515
+ {
516
+ "Default": [
517
+ 3,
518
+ 11,
519
+ 0.273
520
+ ]
521
+ }
522
+ ],
523
+ "IrRegular Text Recognition": [
524
+ 5,
525
+ 11,
526
+ 0.455,
527
+ {
528
+ "Default": [
529
+ 5,
530
+ 11,
531
+ 0.455
532
+ ]
533
+ }
534
+ ],
535
+ "Non-semantic Text Recognition": [
536
+ 6,
537
+ 12,
538
+ 0.5,
539
+ {
540
+ "Default": [
541
+ 6,
542
+ 12,
543
+ 0.5
544
+ ]
545
+ }
546
+ ],
547
+ "Regular Text Recognition": [
548
+ 10,
549
+ 11,
550
+ 0.909,
551
+ {
552
+ "Default": [
553
+ 10,
554
+ 11,
555
+ 0.909
556
+ ]
557
+ }
558
+ ],
559
+ "Handwriting_CN": [
560
+ 6,
561
+ 20,
562
+ 0.3,
563
+ {
564
+ "Default": [
565
+ 6,
566
+ 20,
567
+ 0.3
568
+ ]
569
+ }
570
+ ],
571
+ "Chinese Unlimited": [
572
+ 21,
573
+ 44,
574
+ 0.477,
575
+ {
576
+ "Default": [
577
+ 21,
578
+ 44,
579
+ 0.477
580
+ ]
581
+ }
582
+ ],
583
+ "acc_stderr": 0,
584
+ "acc": 61.111
585
+ },
586
+ "CharXiv": {
587
+ "reject_info": {
588
+ "reject_rate": 0.08,
589
+ "reject_number": 4,
590
+ "total_question": 5000
591
+ },
592
+ "descriptive": {
593
+ "Overall Score": 82.88,
594
+ "By Question": {
595
+ "Q1": 59.84,
596
+ "Q2": 82.53,
597
+ "Q3": 70.82,
598
+ "Q4": 88.72,
599
+ "Q5": 89.03,
600
+ "Q6": 86.35,
601
+ "Q7": 90.17,
602
+ "Q8": 95.98,
603
+ "Q9": 92.0,
604
+ "Q10": 86.3,
605
+ "Q11": 82.29,
606
+ "Q12": 83.52,
607
+ "Q13": 71.69,
608
+ "Q14": 82.62,
609
+ "Q15": 80.19,
610
+ "Q16": 77.78,
611
+ "Q17": 70.09,
612
+ "Q18": 96.76,
613
+ "Q19": 93.85
614
+ },
615
+ "By Category": {
616
+ "Information Extraction": 81.11,
617
+ "Enumeration": 84.01,
618
+ "Pattern Recognition": 89.74,
619
+ "Counting": 86.26,
620
+ "Compositionality": 70.09
621
+ },
622
+ "By Subplot": {
623
+ "1 Subplot": 85.09,
624
+ "2-4 Subplots": 82.26,
625
+ "5+ Subplots": 80.25
626
+ },
627
+ "By Subject": {
628
+ "Computer Science": 80.88,
629
+ "Economics": 82.94,
630
+ "Electrical Engineering and Systems Science": 84.66,
631
+ "Mathematics": 85.53,
632
+ "Physics": 81.3,
633
+ "Quantitative Biology": 81.35,
634
+ "Quantitative Finance": 81.03,
635
+ "Statistics": 85.4
636
+ },
637
+ "By Year": {
638
+ "2020": 80.97,
639
+ "2021": 83.21,
640
+ "2022": 84.22,
641
+ "2023": 83.13
642
+ },
643
+ "N_valid": 3996,
644
+ "N_invalid": 0,
645
+ "Question Type": "Descriptive"
646
+ },
647
+ "reasoning": {
648
+ "Overall Score": 59.5,
649
+ "By Answer Type": {
650
+ "Text-in-Chart": 66.59,
651
+ "Text-in-General": 67.68,
652
+ "Number-in-Chart": 48.28,
653
+ "Number-in-General": 53.71
654
+ },
655
+ "By Source": {
656
+ "GPT-Sourced": 69.57,
657
+ "GPT-Inspired": 57.41,
658
+ "Completely Human": 57.17
659
+ },
660
+ "By Subject": {
661
+ "Computer Science": 59.52,
662
+ "Economics": 55.07,
663
+ "Electrical Engineering and Systems Science": 57.98,
664
+ "Mathematics": 62.96,
665
+ "Physics": 64.57,
666
+ "Quantitative Biology": 63.49,
667
+ "Quantitative Finance": 52.59,
668
+ "Statistics": 59.29
669
+ },
670
+ "By Year": {
671
+ "2020": 56.68,
672
+ "2021": 65.13,
673
+ "2022": 57.79,
674
+ "2023": 58.06
675
+ },
676
+ "By Subplot": {
677
+ "1 Subplot": 61.14,
678
+ "2-4 Subplots": 57.41,
679
+ "5+ Subplots": 60.17
680
+ },
681
+ "N_valid": 1000,
682
+ "N_invalid": 0,
683
+ "Question Type": "Reasoning"
684
+ },
685
+ "accuracy": 71.19,
686
+ "acc_stderr": 0,
687
+ "acc": 71.19
688
+ },
689
+ "MathVision": {
690
+ "accuracy": 44.21,
691
+ "acc_stderr": 0,
692
+ "acc": 44.21
693
+ },
694
+ "CII-Bench": {
695
+ "reject_info": {
696
+ "reject_rate": 0.52,
697
+ "reject_number": 4,
698
+ "total_question": 765
699
+ },
700
+ "accuracy": 60.97,
701
+ "domain_score": {
702
+ "Art": 58.09,
703
+ "CTC": 58.21,
704
+ "Life": 65.37,
705
+ "Society": 58.92,
706
+ "Env.": 60.78,
707
+ "Politics": 66.67
708
+ },
709
+ "emotion_score": {
710
+ "Negative": 60.38,
711
+ "Positive": 60.17,
712
+ "Neutral": 62.26
713
+ },
714
+ "acc_stderr": 0,
715
+ "acc": 60.97
716
+ },
717
+ "Blink": {
718
+ "reject_info": {
719
+ "reject_rate": 0.05,
720
+ "reject_number": 1,
721
+ "total_question": 1901
722
+ },
723
+ "accuracy": 64.37,
724
+ "Art Style": 88.03,
725
+ "Counting": 67.5,
726
+ "Forensic Detection": 58.33,
727
+ "Functional Correspondence": 53.85,
728
+ "IQ Test": 32.0,
729
+ "Jigsaw": 61.33,
730
+ "Multi-view Reasoning": 51.13,
731
+ "Object Localization": 63.11,
732
+ "Relative Depth": 77.42,
733
+ "Relative Reflectance": 31.34,
734
+ "Semantic Correspondence": 55.8,
735
+ "Spatial Relation": 79.02,
736
+ "Visual Correspondence": 91.86,
737
+ "Visual Similarity": 89.63,
738
+ "acc_stderr": 0,
739
+ "acc": 64.37
740
+ }
741
+ }
742
+ }
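The newly added result files, like the Claude-3.7-Sonnet-20250219 file above, expose per-subject accuracies under `subject_score` for the MMMU-family benchmarks. As a rough illustration, and assuming the layout of the file just shown, the subjects could be ranked like this:

```python
import json
from pathlib import Path

# Illustrative sketch, assuming the schema of the newly added result files.
path = Path("Claude-3.7-Sonnet-20250219/results_2025-01-25T10-42-53.190540.json")
results = json.loads(path.read_text(encoding="utf-8"))["results"]

# Rank MMMU_Pro_vision subjects from strongest to weakest.
subjects = results["MMMU_Pro_vision"]["subject_score"]
for name, score in sorted(subjects.items(), key=lambda kv: kv[1], reverse=True):
    print(f"{name}: {score}")
```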
Claude3-Opus-20240229/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -26,9 +26,41 @@
26
  "reject_number": 28,
27
  "total_question": 1730
28
  },
29
- "accuracy": 52.0,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  "acc_stderr": 0,
31
- "acc": 52.0
32
  },
33
  "MmvetV2": {
34
  "reject_info": {
@@ -363,9 +395,9 @@
363
  "reject_number": 1,
364
  "total_question": 3040
365
  },
366
- "accuracy": 24.61,
367
  "acc_stderr": 0,
368
- "acc": 24.61
369
  },
370
  "CII-Bench": {
371
  "accuracy": 47.71,
 
26
  "reject_number": 28,
27
  "total_question": 1730
28
  },
29
+ "accuracy": 19.92,
30
+ "subject_score": {
31
+ "History": 12.73,
32
+ "Art": 33.33,
33
+ "Design": 29.31,
34
+ "Literature": 50.0,
35
+ "Agriculture": 16.67,
36
+ "Finance": 28.81,
37
+ "Sociology": 19.23,
38
+ "Accounting": 24.14,
39
+ "Energy_and_Power": 17.24,
40
+ "Pharmacy": 24.56,
41
+ "Architecture_and_Engineering": 6.78,
42
+ "Clinical_Medicine": 13.79,
43
+ "Public_Health": 15.52,
44
+ "Physics": 23.33,
45
+ "Art_Theory": 27.27,
46
+ "Electronics": 11.67,
47
+ "Psychology": 16.95,
48
+ "Biology": 13.56,
49
+ "Manage": 20.0,
50
+ "Economics": 27.12,
51
+ "Mechanical_Engineering": 17.24,
52
+ "Diagnostics_and_Laboratory_Medicine": 11.67,
53
+ "Basic_Medical_Science": 15.38,
54
+ "Computer_Science": 16.95,
55
+ "Math": 18.33,
56
+ "Music": 18.33,
57
+ "Materials": 18.33,
58
+ "Marketing": 15.25,
59
+ "Chemistry": 23.73,
60
+ "Geography": 18.18
61
+ },
62
  "acc_stderr": 0,
63
+ "acc": 19.92
64
  },
65
  "MmvetV2": {
66
  "reject_info": {
 
395
  "reject_number": 1,
396
  "total_question": 3040
397
  },
398
+ "accuracy": 24.35,
399
  "acc_stderr": 0,
400
+ "acc": 24.35
401
  },
402
  "CII-Bench": {
403
  "accuracy": 47.71,
Doubao-Pro-Vision-32k-241028/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -134,41 +134,41 @@
134
  "acc": 44.59
135
  },
136
  "MMMU_Pro_vision": {
137
- "accuracy": 40.06,
138
  "subject_score": {
139
- "History": 35.71,
140
- "Art": 18.87,
141
- "Design": 31.67,
142
- "Literature": 40.38,
143
- "Agriculture": 20.0,
144
- "Finance": 33.33,
145
- "Sociology": 29.63,
146
- "Accounting": 39.66,
147
- "Energy_and_Power": 63.79,
148
- "Pharmacy": 52.63,
149
- "Architecture_and_Engineering": 46.67,
150
- "Clinical_Medicine": 15.25,
151
- "Public_Health": 39.66,
152
- "Physics": 60.0,
153
- "Art_Theory": 43.64,
154
- "Electronics": 41.67,
155
- "Psychology": 31.67,
156
- "Biology": 32.2,
157
- "Manage": 52.0,
158
- "Economics": 42.37,
159
- "Mechanical_Engineering": 55.93,
160
- "Diagnostics_and_Laboratory_Medicine": 13.33,
161
  "Basic_Medical_Science": 38.46,
162
- "Computer_Science": 55.0,
163
- "Math": 58.33,
164
- "Music": 40.0,
165
- "Materials": 48.33,
166
- "Marketing": 37.29,
167
- "Chemistry": 51.67,
168
- "Geography": 30.77
169
  },
170
  "acc_stderr": 0,
171
- "acc": 40.06
172
  },
173
  "MmvetV2": {
174
  "accuracy": 65.706,
@@ -478,9 +478,9 @@
478
  "acc": 54.62
479
  },
480
  "MathVision": {
481
- "accuracy": 35.56,
482
  "acc_stderr": 0,
483
- "acc": 35.56
484
  },
485
  "CII-Bench": {
486
  "accuracy": 67.97,
 
134
  "acc": 44.59
135
  },
136
  "MMMU_Pro_vision": {
137
+ "accuracy": 33.76,
138
  "subject_score": {
139
+ "History": 30.36,
140
+ "Art": 41.51,
141
+ "Design": 40.0,
142
+ "Literature": 65.38,
143
+ "Agriculture": 18.33,
144
+ "Finance": 50.0,
145
+ "Sociology": 44.44,
146
+ "Accounting": 43.1,
147
+ "Energy_and_Power": 32.76,
148
+ "Pharmacy": 43.86,
149
+ "Architecture_and_Engineering": 21.67,
150
+ "Clinical_Medicine": 22.03,
151
+ "Public_Health": 34.48,
152
+ "Physics": 36.67,
153
+ "Art_Theory": 32.73,
154
+ "Electronics": 25.0,
155
+ "Psychology": 23.33,
156
+ "Biology": 20.34,
157
+ "Manage": 26.0,
158
+ "Economics": 57.63,
159
+ "Mechanical_Engineering": 32.2,
160
+ "Diagnostics_and_Laboratory_Medicine": 16.67,
161
  "Basic_Medical_Science": 38.46,
162
+ "Computer_Science": 33.33,
163
+ "Math": 30.0,
164
+ "Music": 33.33,
165
+ "Materials": 13.33,
166
+ "Marketing": 45.76,
167
+ "Chemistry": 41.67,
168
+ "Geography": 23.08
169
  },
170
  "acc_stderr": 0,
171
+ "acc": 33.76
172
  },
173
  "MmvetV2": {
174
  "accuracy": 65.706,
 
478
  "acc": 54.62
479
  },
480
  "MathVision": {
481
+ "accuracy": 35.49,
482
  "acc_stderr": 0,
483
+ "acc": 35.49
484
  },
485
  "CII-Bench": {
486
  "accuracy": 67.97,
GLM-4V-Plus/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -26,9 +26,41 @@
26
  "acc": 37.19
27
  },
28
  "MMMU_Pro_vision": {
29
- "accuracy": 23.47,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  "acc_stderr": 0,
31
- "acc": 23.47
32
  },
33
  "MmvetV2": {
34
  "reject_info": {
@@ -349,9 +381,9 @@
349
  "acc": 44.96
350
  },
351
  "MathVision": {
352
- "accuracy": 17.66,
353
  "acc_stderr": 0,
354
- "acc": 17.66
355
  },
356
  "CII-Bench": {
357
  "accuracy": 60.86,
 
26
  "acc": 37.19
27
  },
28
  "MMMU_Pro_vision": {
29
+ "accuracy": 23.35,
30
+ "subject_score": {
31
+ "History": 35.71,
32
+ "Art": 47.17,
33
+ "Design": 45.0,
34
+ "Literature": 59.62,
35
+ "Agriculture": 18.33,
36
+ "Finance": 10.0,
37
+ "Sociology": 42.59,
38
+ "Accounting": 15.52,
39
+ "Energy_and_Power": 12.07,
40
+ "Pharmacy": 36.84,
41
+ "Architecture_and_Engineering": 15.0,
42
+ "Clinical_Medicine": 13.56,
43
+ "Public_Health": 13.79,
44
+ "Physics": 11.67,
45
+ "Art_Theory": 47.27,
46
+ "Electronics": 8.33,
47
+ "Psychology": 33.33,
48
+ "Biology": 23.73,
49
+ "Manage": 18.0,
50
+ "Economics": 6.78,
51
+ "Mechanical_Engineering": 10.17,
52
+ "Diagnostics_and_Laboratory_Medicine": 25.0,
53
+ "Basic_Medical_Science": 32.69,
54
+ "Computer_Science": 20.0,
55
+ "Math": 20.0,
56
+ "Music": 21.67,
57
+ "Materials": 8.33,
58
+ "Marketing": 16.95,
59
+ "Chemistry": 25.0,
60
+ "Geography": 17.31
61
+ },
62
  "acc_stderr": 0,
63
+ "acc": 23.35
64
  },
65
  "MmvetV2": {
66
  "reject_info": {
 
381
  "acc": 44.96
382
  },
383
  "MathVision": {
384
+ "accuracy": 17.34,
385
  "acc_stderr": 0,
386
+ "acc": 17.34
387
  },
388
  "CII-Bench": {
389
  "accuracy": 60.86,
GPT-4o-20240806/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 37.17
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 46.53,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  "acc_stderr": 0,
26
- "acc": 46.53
27
  },
28
  "MmvetV2": {
29
  "accuracy": 64.6615,
@@ -339,9 +371,9 @@
339
  "reject_number": 1,
340
  "total_question": 3040
341
  },
342
- "accuracy": 28.79,
343
  "acc_stderr": 0,
344
- "acc": 28.79
345
  },
346
  "CII-Bench": {
347
  "accuracy": 59.22,
 
21
  "acc": 37.17
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 39.36,
25
+ "subject_score": {
26
+ "History": 46.43,
27
+ "Art": 54.72,
28
+ "Design": 53.33,
29
+ "Literature": 69.23,
30
+ "Agriculture": 23.33,
31
+ "Finance": 58.33,
32
+ "Sociology": 51.85,
33
+ "Accounting": 51.72,
34
+ "Energy_and_Power": 22.41,
35
+ "Pharmacy": 35.09,
36
+ "Architecture_and_Engineering": 23.33,
37
+ "Clinical_Medicine": 30.51,
38
+ "Public_Health": 62.07,
39
+ "Physics": 53.33,
40
+ "Art_Theory": 56.36,
41
+ "Electronics": 21.67,
42
+ "Psychology": 35.0,
43
+ "Biology": 40.68,
44
+ "Manage": 40.0,
45
+ "Economics": 50.85,
46
+ "Mechanical_Engineering": 23.73,
47
+ "Diagnostics_and_Laboratory_Medicine": 18.33,
48
+ "Basic_Medical_Science": 42.31,
49
+ "Computer_Science": 41.67,
50
+ "Math": 30.0,
51
+ "Music": 23.33,
52
+ "Materials": 15.0,
53
+ "Marketing": 38.98,
54
+ "Chemistry": 43.33,
55
+ "Geography": 32.69
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 39.36
59
  },
60
  "MmvetV2": {
61
  "accuracy": 64.6615,
 
371
  "reject_number": 1,
372
  "total_question": 3040
373
  },
374
+ "accuracy": 27.11,
375
  "acc_stderr": 0,
376
+ "acc": 27.11
377
  },
378
  "CII-Bench": {
379
  "accuracy": 59.22,
GPT-4o-20241120/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -71,9 +71,41 @@
71
  "reject_number": 14,
72
  "total_question": 1730
73
  },
74
- "accuracy": 45.4,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  "acc_stderr": 0,
76
- "acc": 45.4
77
  },
78
  "MmvetV2": {
79
  "reject_info": {
@@ -403,9 +435,9 @@
403
  "reject_number": 17,
404
  "total_question": 3040
405
  },
406
- "accuracy": 29.61,
407
  "acc_stderr": 0,
408
- "acc": 29.61
409
  },
410
  "CII-Bench": {
411
  "accuracy": 61.05,
 
71
  "reject_number": 14,
72
  "total_question": 1730
73
  },
74
+ "accuracy": 45.05,
75
+ "subject_score": {
76
+ "History": 46.43,
77
+ "Art": 63.46,
78
+ "Design": 70.0,
79
+ "Finance": 66.67,
80
+ "Agriculture": 36.67,
81
+ "Literature": 71.15,
82
+ "Sociology": 59.26,
83
+ "Pharmacy": 40.35,
84
+ "Accounting": 56.14,
85
+ "Energy_and_Power": 43.1,
86
+ "Clinical_Medicine": 32.76,
87
+ "Architecture_and_Engineering": 26.67,
88
+ "Public_Health": 53.45,
89
+ "Art_Theory": 65.38,
90
+ "Physics": 40.0,
91
+ "Psychology": 42.37,
92
+ "Electronics": 35.59,
93
+ "Biology": 44.07,
94
+ "Economics": 61.02,
95
+ "Manage": 36.73,
96
+ "Mechanical_Engineering": 34.48,
97
+ "Diagnostics_and_Laboratory_Medicine": 31.03,
98
+ "Basic_Medical_Science": 44.23,
99
+ "Computer_Science": 45.76,
100
+ "Math": 31.67,
101
+ "Music": 23.33,
102
+ "Materials": 16.67,
103
+ "Marketing": 53.45,
104
+ "Chemistry": 46.67,
105
+ "Geography": 40.38
106
+ },
107
  "acc_stderr": 0,
108
+ "acc": 45.05
109
  },
110
  "MmvetV2": {
111
  "reject_info": {
 
435
  "reject_number": 17,
436
  "total_question": 3040
437
  },
438
+ "accuracy": 29.21,
439
  "acc_stderr": 0,
440
+ "acc": 29.21
441
  },
442
  "CII-Bench": {
443
  "accuracy": 61.05,
GPT-4o-mini-20240718/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 31.39
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 23.35,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  "acc_stderr": 0,
26
- "acc": 23.35
27
  },
28
  "MmvetV2": {
29
  "accuracy": 59.4197,
@@ -339,9 +371,9 @@
339
  "reject_number": 1,
340
  "total_question": 3040
341
  },
342
- "accuracy": 26.95,
343
  "acc_stderr": 0,
344
- "acc": 26.95
345
  },
346
  "CII-Bench": {
347
  "accuracy": 45.75,
 
21
  "acc": 31.39
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 28.84,
25
+ "subject_score": {
26
+ "History": 26.79,
27
+ "Art": 45.28,
28
+ "Design": 45.0,
29
+ "Literature": 67.31,
30
+ "Agriculture": 21.67,
31
+ "Finance": 40.0,
32
+ "Sociology": 37.04,
33
+ "Accounting": 37.93,
34
+ "Energy_and_Power": 17.24,
35
+ "Pharmacy": 38.6,
36
+ "Architecture_and_Engineering": 20.0,
37
+ "Clinical_Medicine": 10.17,
38
+ "Public_Health": 24.14,
39
+ "Physics": 26.67,
40
+ "Art_Theory": 41.82,
41
+ "Electronics": 13.33,
42
+ "Psychology": 25.0,
43
+ "Biology": 22.03,
44
+ "Manage": 24.0,
45
+ "Economics": 30.51,
46
+ "Mechanical_Engineering": 20.34,
47
+ "Diagnostics_and_Laboratory_Medicine": 30.0,
48
+ "Basic_Medical_Science": 30.77,
49
+ "Computer_Science": 33.33,
50
+ "Math": 20.0,
51
+ "Music": 26.67,
52
+ "Materials": 11.67,
53
+ "Marketing": 27.12,
54
+ "Chemistry": 33.33,
55
+ "Geography": 25.0
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 28.84
59
  },
60
  "MmvetV2": {
61
  "accuracy": 59.4197,
 
371
  "reject_number": 1,
372
  "total_question": 3040
373
  },
374
+ "accuracy": 26.36,
375
  "acc_stderr": 0,
376
+ "acc": 26.36
377
  },
378
  "CII-Bench": {
379
  "accuracy": 45.75,
Gemini-1.5-Flash/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -26,9 +26,41 @@
26
  "reject_number": 1,
27
  "total_question": 1730
28
  },
29
- "accuracy": 33.02,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  "acc_stderr": 0,
31
- "acc": 33.02
32
  },
33
  "MmvetV2": {
34
  "accuracy": 60.0193,
@@ -343,9 +375,9 @@
343
  "acc": 51.6
344
  },
345
  "MathVision": {
346
- "accuracy": 21.15,
347
  "acc_stderr": 0,
348
- "acc": 21.15
349
  },
350
  "CII-Bench": {
351
  "accuracy": 45.23,
 
26
  "reject_number": 1,
27
  "total_question": 1730
28
  },
29
+ "accuracy": 26.32,
30
+ "subject_score": {
31
+ "History": 30.36,
32
+ "Art": 38.46,
33
+ "Design": 43.33,
34
+ "Literature": 46.15,
35
+ "Agriculture": 18.33,
36
+ "Finance": 20.0,
37
+ "Sociology": 31.48,
38
+ "Accounting": 24.14,
39
+ "Energy_and_Power": 12.07,
40
+ "Pharmacy": 26.32,
41
+ "Architecture_and_Engineering": 21.67,
42
+ "Clinical_Medicine": 16.95,
43
+ "Public_Health": 22.41,
44
+ "Physics": 31.67,
45
+ "Art_Theory": 29.09,
46
+ "Electronics": 25.0,
47
+ "Psychology": 20.0,
48
+ "Biology": 30.51,
49
+ "Manage": 28.0,
50
+ "Economics": 23.73,
51
+ "Mechanical_Engineering": 23.73,
52
+ "Diagnostics_and_Laboratory_Medicine": 15.0,
53
+ "Basic_Medical_Science": 40.38,
54
+ "Computer_Science": 23.33,
55
+ "Math": 23.33,
56
+ "Music": 26.67,
57
+ "Materials": 23.33,
58
+ "Marketing": 33.9,
59
+ "Chemistry": 20.0,
60
+ "Geography": 26.92
61
+ },
62
  "acc_stderr": 0,
63
+ "acc": 26.32
64
  },
65
  "MmvetV2": {
66
  "accuracy": 60.0193,
 
375
  "acc": 51.6
376
  },
377
  "MathVision": {
378
+ "accuracy": 18.49,
379
  "acc_stderr": 0,
380
+ "acc": 18.49
381
  },
382
  "CII-Bench": {
383
  "accuracy": 45.23,
Gemini-1.5-Pro/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -63,9 +63,41 @@
63
  "reject_number": 57,
64
  "total_question": 1730
65
  },
66
- "accuracy": 47.7,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
67
  "acc_stderr": 0,
68
- "acc": 47.7
69
  },
70
  "MmvetV2": {
71
  "accuracy": 65.9768,
@@ -381,9 +413,9 @@
381
  "acc": 62.41
382
  },
383
  "MathVision": {
384
- "accuracy": 44.64,
385
  "acc_stderr": 0,
386
- "acc": 44.64
387
  },
388
  "CII-Bench": {
389
  "accuracy": 59.61,
 
63
  "reject_number": 57,
64
  "total_question": 1730
65
  },
66
+ "accuracy": 43.16,
67
+ "subject_score": {
68
+ "Sociology": 50.98,
69
+ "Design": 60.71,
70
+ "History": 52.73,
71
+ "Art": 58.82,
72
+ "Agriculture": 28.33,
73
+ "Pharmacy": 39.29,
74
+ "Accounting": 60.0,
75
+ "Clinical_Medicine": 36.21,
76
+ "Public_Health": 56.36,
77
+ "Art_Theory": 64.15,
78
+ "Architecture_and_Engineering": 32.76,
79
+ "Finance": 59.65,
80
+ "Energy_and_Power": 37.5,
81
+ "Psychology": 40.68,
82
+ "Literature": 60.0,
83
+ "Physics": 42.37,
84
+ "Manage": 38.3,
85
+ "Economics": 47.37,
86
+ "Electronics": 39.66,
87
+ "Biology": 40.35,
88
+ "Diagnostics_and_Laboratory_Medicine": 23.33,
89
+ "Basic_Medical_Science": 46.0,
90
+ "Mechanical_Engineering": 32.2,
91
+ "Computer_Science": 41.38,
92
+ "Math": 34.48,
93
+ "Music": 20.0,
94
+ "Marketing": 59.65,
95
+ "Materials": 23.21,
96
+ "Chemistry": 40.68,
97
+ "Geography": 37.5
98
+ },
99
  "acc_stderr": 0,
100
+ "acc": 43.16
101
  },
102
  "MmvetV2": {
103
  "accuracy": 65.9768,
 
413
  "acc": 62.41
414
  },
415
  "MathVision": {
416
+ "accuracy": 44.47,
417
  "acc_stderr": 0,
418
+ "acc": 44.47
419
  },
420
  "CII-Bench": {
421
  "accuracy": 59.61,
Gemini-2.0-Flash(experimental)/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,723 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Gemini-2.0-Flash(experimental)",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 64,
12
+ "accuracy": 72.73
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 492,
17
+ "accuracy": 54.67
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 56,
22
+ "accuracy": 44.44
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 103,
27
+ "accuracy": 50.49
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 99,
32
+ "accuracy": 64.71
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 54,
37
+ "accuracy": 63.53
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 116,
42
+ "accuracy": 47.54
43
+ },
44
+ "accuracy": 54.67,
45
+ "acc_stderr": 0,
46
+ "acc": 54.67
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 65.89,
50
+ "subject_score": {
51
+ "Accounting": 56.67,
52
+ "Agriculture": 80.0,
53
+ "Architecture": 53.33,
54
+ "Art": 91.67,
55
+ "Basic": 73.33,
56
+ "Biology": 63.33,
57
+ "Chemistry": 50.0,
58
+ "Clinical": 70.0,
59
+ "Computer": 66.67,
60
+ "Design": 83.33,
61
+ "Diagnostics": 50.0,
62
+ "Economics": 66.67,
63
+ "Electronics": 43.33,
64
+ "Energy": 56.67,
65
+ "Finance": 56.67,
66
+ "Geography": 63.33,
67
+ "History": 80.0,
68
+ "Literature": 93.33,
69
+ "Manage": 63.33,
70
+ "Marketing": 66.67,
71
+ "Materials": 50.0,
72
+ "Math": 46.67,
73
+ "Mechanical": 60.0,
74
+ "Music": 30.0,
75
+ "Pharmacy": 80.0,
76
+ "Physics": 73.33,
77
+ "Psychology": 70.0,
78
+ "Public": 76.67,
79
+ "Sociology": 70.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 65.57,
83
+ "Easy": 73.22,
84
+ "Hard": 54.7
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 65.89
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 51.21,
91
+ "subject_score": {
92
+ "History": 57.14,
93
+ "Art": 69.81,
94
+ "Design": 68.33,
95
+ "Literature": 63.46,
96
+ "Agriculture": 43.33,
97
+ "Finance": 66.67,
98
+ "Sociology": 51.85,
99
+ "Accounting": 63.79,
100
+ "Energy_and_Power": 37.93,
101
+ "Pharmacy": 45.61,
102
+ "Architecture_and_Engineering": 45.0,
103
+ "Clinical_Medicine": 42.37,
104
+ "Public_Health": 67.24,
105
+ "Physics": 40.0,
106
+ "Art_Theory": 67.27,
107
+ "Electronics": 53.33,
108
+ "Psychology": 50.0,
109
+ "Biology": 42.37,
110
+ "Manage": 40.0,
111
+ "Economics": 62.71,
112
+ "Mechanical_Engineering": 45.76,
113
+ "Diagnostics_and_Laboratory_Medicine": 31.67,
114
+ "Basic_Medical_Science": 59.62,
115
+ "Computer_Science": 56.67,
116
+ "Math": 55.0,
117
+ "Music": 23.33,
118
+ "Materials": 36.67,
119
+ "Marketing": 62.71,
120
+ "Chemistry": 48.33,
121
+ "Geography": 42.31
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 48.69,
125
+ "Easy": 62.88,
126
+ "Hard": 40.9
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 51.21
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 47.63,
133
+ "subject_score": {
134
+ "History": 51.79,
135
+ "Art": 60.38,
136
+ "Design": 68.33,
137
+ "Literature": 65.38,
138
+ "Agriculture": 28.33,
139
+ "Finance": 70.0,
140
+ "Sociology": 50.0,
141
+ "Accounting": 58.62,
142
+ "Energy_and_Power": 34.48,
143
+ "Pharmacy": 49.12,
144
+ "Architecture_and_Engineering": 30.0,
145
+ "Clinical_Medicine": 45.76,
146
+ "Public_Health": 58.62,
147
+ "Physics": 51.67,
148
+ "Art_Theory": 65.45,
149
+ "Electronics": 60.0,
150
+ "Psychology": 46.67,
151
+ "Biology": 45.76,
152
+ "Manage": 36.0,
153
+ "Economics": 64.41,
154
+ "Mechanical_Engineering": 32.2,
155
+ "Diagnostics_and_Laboratory_Medicine": 21.67,
156
+ "Basic_Medical_Science": 51.92,
157
+ "Computer_Science": 43.33,
158
+ "Math": 41.67,
159
+ "Music": 23.33,
160
+ "Materials": 31.67,
161
+ "Marketing": 55.93,
162
+ "Chemistry": 46.67,
163
+ "Geography": 44.23
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 47.63
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 72.4178,
170
+ "capability_scores": {
171
+ "ocr": 77.21153846153848,
172
+ "math": 87.3529411764706,
173
+ "spat": 68.68020304568523,
174
+ "rec": 69.75728155339812,
175
+ "know": 71.08974358974358,
176
+ "gen": 70.8363636363637,
177
+ "seq": 69.64285714285714
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 86.36363636363636,
181
+ "spat_ocr_math": 99.33333333333334,
182
+ "spat_ocr_rec_math": 50.0,
183
+ "spat_rec": 69.64285714285714,
184
+ "spat_ocr": 75.0,
185
+ "spat_ocr_rec": 45.83333333333333,
186
+ "spat_know_ocr": 95.0,
187
+ "ocr_rec": 100.0,
188
+ "spat_know_rec": 57.99999999999999,
189
+ "ocr": 82.81250000000001,
190
+ "rec": 73.38983050847457,
191
+ "know_rec": 84.61538461538461,
192
+ "know_gen_rec": 69.59999999999998,
193
+ "know_ocr_rec_gen": 76.92307692307693,
194
+ "spat_ocr_rec_gen": 78.6046511627907,
195
+ "spat_ocr_gen": 85.00000000000001,
196
+ "gen_seq_ocr_math_spat": 100.0,
197
+ "seq_ocr_math_spat_rec": 0.0,
198
+ "spat_gen_rec": 52.27272727272726,
199
+ "spat_ocr_gen_math": 30.0,
200
+ "spat_seq_rec": 50.0,
201
+ "spat_seq_ocr_rec": 50.0,
202
+ "spat_know_gen_rec": 53.333333333333336,
203
+ "gen_rec": 67.05882352941178,
204
+ "spat_know_ocr_rec": 25.0,
205
+ "ocr_know_gen_rec": 76.92307692307693,
206
+ "know_gen_ocr_spat_rec": 80.0,
207
+ "ocr_rec_math": 100.0,
208
+ "ocr_rec_gen": 88.00000000000001,
209
+ "seq_ocr_rec_gen": 78.57142857142856,
210
+ "ocr_gen": 75.3846153846154,
211
+ "seq_gen_rec": 70.71428571428571,
212
+ "seq_rec": 71.66666666666667,
213
+ "spat_seq_gen_rec": 78.75,
214
+ "know_seq_rec": 100.0,
215
+ "know_gen_rec_seq": 85.00000000000001,
216
+ "gen_seq_ocr_spat_rec": 50.0,
217
+ "know_gen_seq_ocr_rec": 90.0,
218
+ "know_rec_math": 100.0,
219
+ "seq_ocr_rec": 100.0
220
+ },
221
+ "acc_stderr": 0,
222
+ "acc": 72.4178
223
+ },
224
+ "MathVerse": {
225
+ "reject_info": {
226
+ "reject_rate": 0.03,
227
+ "reject_number": 1,
228
+ "total_question": 3940
229
+ },
230
+ "Text Dominant": {
231
+ "accuracy": 61.8,
232
+ "correct": 487,
233
+ "total": 788
234
+ },
235
+ "Total": {
236
+ "accuracy": 53.21,
237
+ "correct": 2096,
238
+ "total": 3939
239
+ },
240
+ "Text Lite": {
241
+ "accuracy": 53.17,
242
+ "correct": 419,
243
+ "total": 788
244
+ },
245
+ "Vision Intensive": {
246
+ "accuracy": 50.44,
247
+ "correct": 397,
248
+ "total": 787
249
+ },
250
+ "Vision Dominant": {
251
+ "accuracy": 49.75,
252
+ "correct": 392,
253
+ "total": 788
254
+ },
255
+ "Vision Only": {
256
+ "accuracy": 50.89,
257
+ "correct": 401,
258
+ "total": 788
259
+ },
260
+ "accuracy": 53.21,
261
+ "acc_stderr": 0,
262
+ "acc": 53.21
263
+ },
264
+ "Ocrlite": {
265
+ "final_score": [
266
+ 1347,
267
+ 1644
268
+ ],
269
+ "accuracy": 81.934,
270
+ "Key Information Extraction-Bookshelf": [
271
+ 46,
272
+ 51,
273
+ 0.902,
274
+ {
275
+ "Default": [
276
+ 46,
277
+ 51,
278
+ 0.902
279
+ ]
280
+ }
281
+ ],
282
+ "Scene Text-centric VQA-diet_constraints": [
283
+ 72,
284
+ 90,
285
+ 0.8,
286
+ {
287
+ "Default": [
288
+ 72,
289
+ 90,
290
+ 0.8
291
+ ]
292
+ }
293
+ ],
294
+ "Doc-oriented VQA-Control": [
295
+ 158,
296
+ 189,
297
+ 0.836,
298
+ {
299
+ "Default": [
300
+ 158,
301
+ 189,
302
+ 0.836
303
+ ]
304
+ }
305
+ ],
306
+ "Doc-oriented VQA": [
307
+ 179,
308
+ 204,
309
+ 0.877,
310
+ {
311
+ "Default": [
312
+ 179,
313
+ 204,
314
+ 0.877
315
+ ]
316
+ }
317
+ ],
318
+ "Scene Text-centric VQA-Fake_logo": [
319
+ 73,
320
+ 119,
321
+ 0.613,
322
+ {
323
+ "Default": [
324
+ 73,
325
+ 119,
326
+ 0.613
327
+ ]
328
+ }
329
+ ],
330
+ "Handwritten Mathematical Expression Recognition": [
331
+ 3,
332
+ 100,
333
+ 0.03,
334
+ {
335
+ "Default": [
336
+ 3,
337
+ 100,
338
+ 0.03
339
+ ]
340
+ }
341
+ ],
342
+ "Key Information Extraction": [
343
+ 200,
344
+ 209,
345
+ 0.957,
346
+ {
347
+ "Default": [
348
+ 200,
349
+ 209,
350
+ 0.957
351
+ ]
352
+ }
353
+ ],
354
+ "Scene Text-centric VQA-Control": [
355
+ 184,
356
+ 200,
357
+ 0.92,
358
+ {
359
+ "Default": [
360
+ 184,
361
+ 200,
362
+ 0.92
363
+ ]
364
+ }
365
+ ],
366
+ "Scene Text-centric VQA": [
367
+ 250,
368
+ 282,
369
+ 0.887,
370
+ {
371
+ "Default": [
372
+ 250,
373
+ 282,
374
+ 0.887
375
+ ]
376
+ }
377
+ ],
378
+ "Artistic Text Recognition": [
379
+ 41,
380
+ 50,
381
+ 0.82,
382
+ {
383
+ "Default": [
384
+ 41,
385
+ 50,
386
+ 0.82
387
+ ]
388
+ }
389
+ ],
390
+ "Irregular Text Recognition": [
391
+ 46,
392
+ 50,
393
+ 0.92,
394
+ {
395
+ "Default": [
396
+ 46,
397
+ 50,
398
+ 0.92
399
+ ]
400
+ }
401
+ ],
402
+ "Non-Semantic Text Recognition": [
403
+ 45,
404
+ 50,
405
+ 0.9,
406
+ {
407
+ "Default": [
408
+ 45,
409
+ 50,
410
+ 0.9
411
+ ]
412
+ }
413
+ ],
414
+ "Regular Text Recognition": [
415
+ 50,
416
+ 50,
417
+ 1.0,
418
+ {
419
+ "Default": [
420
+ 50,
421
+ 50,
422
+ 1.0
423
+ ]
424
+ }
425
+ ],
426
+ "acc_stderr": 0,
427
+ "acc": 81.934
428
+ },
429
+ "OcrliteZh": {
430
+ "final_score": [
431
+ 143,
432
+ 234
433
+ ],
434
+ "accuracy": 61.111,
435
+ "Docvqa": [
436
+ 6,
437
+ 10,
438
+ 0.6,
439
+ {
440
+ "Default": [
441
+ 6,
442
+ 10,
443
+ 0.6
444
+ ]
445
+ }
446
+ ],
447
+ "Chartqa-human": [
448
+ 7,
449
+ 10,
450
+ 0.7,
451
+ {
452
+ "Default": [
453
+ 7,
454
+ 10,
455
+ 0.7
456
+ ]
457
+ }
458
+ ],
459
+ "Chartqa-au": [
460
+ 7,
461
+ 10,
462
+ 0.7,
463
+ {
464
+ "Default": [
465
+ 7,
466
+ 10,
467
+ 0.7
468
+ ]
469
+ }
470
+ ],
471
+ "infographic": [
472
+ 4,
473
+ 10,
474
+ 0.4,
475
+ {
476
+ "Default": [
477
+ 4,
478
+ 10,
479
+ 0.4
480
+ ]
481
+ }
482
+ ],
483
+ "Key Information Extraction": [
484
+ 38,
485
+ 45,
486
+ 0.844,
487
+ {
488
+ "Default": [
489
+ 38,
490
+ 45,
491
+ 0.844
492
+ ]
493
+ }
494
+ ],
495
+ "Scene Text-centric VQA": [
496
+ 23,
497
+ 40,
498
+ 0.575,
499
+ {
500
+ "Default": [
501
+ 23,
502
+ 40,
503
+ 0.575
504
+ ]
505
+ }
506
+ ],
507
+ "Artistic Text Recognition": [
508
+ 5,
509
+ 11,
510
+ 0.455,
511
+ {
512
+ "Default": [
513
+ 5,
514
+ 11,
515
+ 0.455
516
+ ]
517
+ }
518
+ ],
519
+ "IrRegular Text Recognition": [
520
+ 4,
521
+ 11,
522
+ 0.364,
523
+ {
524
+ "Default": [
525
+ 4,
526
+ 11,
527
+ 0.364
528
+ ]
529
+ }
530
+ ],
531
+ "Non-semantic Text Recognition": [
532
+ 8,
533
+ 12,
534
+ 0.667,
535
+ {
536
+ "Default": [
537
+ 8,
538
+ 12,
539
+ 0.667
540
+ ]
541
+ }
542
+ ],
543
+ "Regular Text Recognition": [
544
+ 8,
545
+ 11,
546
+ 0.727,
547
+ {
548
+ "Default": [
549
+ 8,
550
+ 11,
551
+ 0.727
552
+ ]
553
+ }
554
+ ],
555
+ "Handwriting_CN": [
556
+ 10,
557
+ 20,
558
+ 0.5,
559
+ {
560
+ "Default": [
561
+ 10,
562
+ 20,
563
+ 0.5
564
+ ]
565
+ }
566
+ ],
567
+ "Chinese Unlimited": [
568
+ 23,
569
+ 44,
570
+ 0.523,
571
+ {
572
+ "Default": [
573
+ 23,
574
+ 44,
575
+ 0.523
576
+ ]
577
+ }
578
+ ],
579
+ "acc_stderr": 0,
580
+ "acc": 61.111
581
+ },
582
+ "CharXiv": {
583
+ "descriptive": {
584
+ "Overall Score": 80.38,
585
+ "By Question": {
586
+ "Q1": 74.59,
587
+ "Q2": 81.3,
588
+ "Q3": 72.96,
589
+ "Q4": 89.11,
590
+ "Q5": 82.01,
591
+ "Q6": 82.73,
592
+ "Q7": 78.21,
593
+ "Q8": 92.86,
594
+ "Q9": 86.07,
595
+ "Q10": 88.36,
596
+ "Q11": 64.57,
597
+ "Q12": 76.92,
598
+ "Q13": 73.06,
599
+ "Q14": 75.89,
600
+ "Q15": 83.07,
601
+ "Q16": 69.44,
602
+ "Q17": 65.62,
603
+ "Q18": 94.74,
604
+ "Q19": 90.77
605
+ },
606
+ "By Category": {
607
+ "Information Extraction": 80.25,
608
+ "Enumeration": 81.92,
609
+ "Pattern Recognition": 81.22,
610
+ "Counting": 83.46,
611
+ "Compositionality": 65.62
612
+ },
613
+ "By Subplot": {
614
+ "1 Subplot": 83.61,
615
+ "2-4 Subplots": 80.69,
616
+ "5+ Subplots": 74.58
617
+ },
618
+ "By Subject": {
619
+ "Computer Science": 79.76,
620
+ "Economics": 80.25,
621
+ "Electrical Engineering and Systems Science": 86.13,
622
+ "Mathematics": 82.04,
623
+ "Physics": 76.18,
624
+ "Quantitative Biology": 75.2,
625
+ "Quantitative Finance": 81.25,
626
+ "Statistics": 82.74
627
+ },
628
+ "By Year": {
629
+ "2020": 79.25,
630
+ "2021": 79.6,
631
+ "2022": 81.66,
632
+ "2023": 81.05
633
+ },
634
+ "N_valid": 4000,
635
+ "N_invalid": 0,
636
+ "Question Type": "Descriptive"
637
+ },
638
+ "reasoning": {
639
+ "Overall Score": 51.8,
640
+ "By Answer Type": {
641
+ "Text-in-Chart": 55.45,
642
+ "Text-in-General": 46.46,
643
+ "Number-in-Chart": 53.88,
644
+ "Number-in-General": 44.98
645
+ },
646
+ "By Source": {
647
+ "GPT-Sourced": 64.13,
648
+ "GPT-Inspired": 47.22,
649
+ "Completely Human": 49.67
650
+ },
651
+ "By Subject": {
652
+ "Computer Science": 47.62,
653
+ "Economics": 54.35,
654
+ "Electrical Engineering and Systems Science": 52.94,
655
+ "Mathematics": 53.33,
656
+ "Physics": 54.33,
657
+ "Quantitative Biology": 57.14,
658
+ "Quantitative Finance": 48.28,
659
+ "Statistics": 45.13
660
+ },
661
+ "By Year": {
662
+ "2020": 51.42,
663
+ "2021": 59.39,
664
+ "2022": 44.67,
665
+ "2023": 51.21
666
+ },
667
+ "By Subplot": {
668
+ "1 Subplot": 57.77,
669
+ "2-4 Subplots": 48.15,
670
+ "5+ Subplots": 47.88
671
+ },
672
+ "N_valid": 1000,
673
+ "N_invalid": 0,
674
+ "Question Type": "Reasoning"
675
+ },
676
+ "accuracy": 66.09,
677
+ "acc_stderr": 0,
678
+ "acc": 66.09
679
+ },
680
+ "MathVision": {
681
+ "accuracy": 47.5,
682
+ "acc_stderr": 0,
683
+ "acc": 47.5
684
+ },
685
+ "CII-Bench": {
686
+ "accuracy": 62.61,
687
+ "domain_score": {
688
+ "Life": 62.34,
689
+ "Art": 66.91,
690
+ "CTC": 50.37,
691
+ "Society": 65.41,
692
+ "Env.": 68.52,
693
+ "Politics": 75.0
694
+ },
695
+ "emotion_score": {
696
+ "Neutral": 64.66,
697
+ "Negative": 64.91,
698
+ "Positive": 57.69
699
+ },
700
+ "acc_stderr": 0,
701
+ "acc": 62.61
702
+ },
703
+ "Blink": {
704
+ "accuracy": 62.02,
705
+ "Art Style": 79.49,
706
+ "Counting": 66.67,
707
+ "Forensic Detection": 78.03,
708
+ "Functional Correspondence": 50.77,
709
+ "IQ Test": 24.67,
710
+ "Jigsaw": 57.33,
711
+ "Multi-view Reasoning": 52.63,
712
+ "Object Localization": 50.0,
713
+ "Relative Depth": 83.87,
714
+ "Relative Reflectance": 29.1,
715
+ "Semantic Correspondence": 55.4,
716
+ "Spatial Relation": 76.22,
717
+ "Visual Correspondence": 83.14,
718
+ "Visual Similarity": 82.22,
719
+ "acc_stderr": 0,
720
+ "acc": 62.02
721
+ }
722
+ }
723
+ }
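Several benchmarks in these files also carry a `reject_info` block (for example the MathVerse entry in the Gemini-2.0-Flash(experimental) file above). The stored `reject_rate` is simply `reject_number / total_question` expressed as a percentage, which the following minimal check reproduces with the MathVerse numbers from that file:

```python
# Minimal check of the reject_rate convention used in these result files.
reject_number, total_question = 1, 3940  # MathVerse reject_info values above
reject_rate = round(100 * reject_number / total_question, 2)
print(reject_rate)  # -> 0.03, matching the stored "reject_rate"
```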
Gemini-2.0-pro-exp-20250205/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,758 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Gemini-2.0-pro-exp-20250205",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 69,
12
+ "accuracy": 78.41
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 550,
17
+ "accuracy": 61.11
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 53,
22
+ "accuracy": 42.06
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 114,
27
+ "accuracy": 55.88
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 103,
32
+ "accuracy": 67.32
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 63,
37
+ "accuracy": 74.12
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 148,
42
+ "accuracy": 60.66
43
+ },
44
+ "accuracy": 61.11,
45
+ "acc_stderr": 0,
46
+ "acc": 61.11
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 62.89,
50
+ "subject_score": {
51
+ "Accounting": 50.0,
52
+ "Agriculture": 66.67,
53
+ "Architecture": 36.67,
54
+ "Art": 88.33,
55
+ "Basic": 73.33,
56
+ "Biology": 56.67,
57
+ "Chemistry": 56.67,
58
+ "Clinical": 66.67,
59
+ "Computer": 56.67,
60
+ "Design": 86.67,
61
+ "Diagnostics": 46.67,
62
+ "Economics": 66.67,
63
+ "Electronics": 36.67,
64
+ "Energy": 50.0,
65
+ "Finance": 30.0,
66
+ "Geography": 63.33,
67
+ "History": 80.0,
68
+ "Literature": 90.0,
69
+ "Manage": 63.33,
70
+ "Marketing": 60.0,
71
+ "Materials": 53.33,
72
+ "Math": 53.33,
73
+ "Mechanical": 46.67,
74
+ "Music": 36.67,
75
+ "Pharmacy": 86.67,
76
+ "Physics": 76.67,
77
+ "Psychology": 73.33,
78
+ "Public": 73.33,
79
+ "Sociology": 73.33
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 62.26,
83
+ "Easy": 72.2,
84
+ "Hard": 49.17
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 62.89
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 45.9,
91
+ "subject_score": {
92
+ "History": 55.36,
93
+ "Art": 66.04,
94
+ "Design": 66.67,
95
+ "Literature": 59.62,
96
+ "Agriculture": 35.0,
97
+ "Finance": 28.33,
98
+ "Sociology": 59.26,
99
+ "Accounting": 36.21,
100
+ "Energy_and_Power": 34.48,
101
+ "Pharmacy": 49.12,
102
+ "Architecture_and_Engineering": 36.67,
103
+ "Clinical_Medicine": 52.54,
104
+ "Public_Health": 46.55,
105
+ "Physics": 40.0,
106
+ "Art_Theory": 69.09,
107
+ "Electronics": 51.67,
108
+ "Psychology": 50.0,
109
+ "Biology": 44.07,
110
+ "Manage": 40.0,
111
+ "Economics": 44.07,
112
+ "Mechanical_Engineering": 33.9,
113
+ "Diagnostics_and_Laboratory_Medicine": 43.33,
114
+ "Basic_Medical_Science": 57.69,
115
+ "Computer_Science": 46.67,
116
+ "Math": 46.67,
117
+ "Music": 31.67,
118
+ "Materials": 21.67,
119
+ "Marketing": 45.76,
120
+ "Chemistry": 51.67,
121
+ "Geography": 40.38
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 45.44,
125
+ "Easy": 56.82,
126
+ "Hard": 32.42
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 45.9
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "reject_info": {
133
+ "reject_rate": 0.06,
134
+ "reject_number": 1,
135
+ "total_question": 1730
136
+ },
137
+ "accuracy": 41.01,
138
+ "subject_score": {
139
+ "History": 50.0,
140
+ "Art": 63.46,
141
+ "Design": 71.67,
142
+ "Literature": 67.31,
143
+ "Agriculture": 30.0,
144
+ "Finance": 26.67,
145
+ "Sociology": 51.85,
146
+ "Accounting": 39.66,
147
+ "Energy_and_Power": 17.24,
148
+ "Pharmacy": 38.6,
149
+ "Architecture_and_Engineering": 26.67,
150
+ "Clinical_Medicine": 42.37,
151
+ "Public_Health": 36.21,
152
+ "Physics": 36.67,
153
+ "Art_Theory": 74.55,
154
+ "Electronics": 36.67,
155
+ "Psychology": 40.0,
156
+ "Biology": 35.59,
157
+ "Manage": 42.0,
158
+ "Economics": 59.32,
159
+ "Mechanical_Engineering": 30.51,
160
+ "Diagnostics_and_Laboratory_Medicine": 33.33,
161
+ "Basic_Medical_Science": 55.77,
162
+ "Computer_Science": 41.67,
163
+ "Math": 33.33,
164
+ "Music": 30.0,
165
+ "Materials": 18.33,
166
+ "Marketing": 37.29,
167
+ "Chemistry": 43.33,
168
+ "Geography": 30.77
169
+ },
170
+ "acc_stderr": 0,
171
+ "acc": 41.01
172
+ },
173
+ "MmvetV2": {
174
+ "reject_info": {
175
+ "reject_rate": 1.93,
176
+ "reject_number": 10,
177
+ "total_question": 517
178
+ },
179
+ "accuracy": 72.1893,
180
+ "capability_scores": {
181
+ "math": 87.87878787878788,
182
+ "ocr": 78.57843137254909,
183
+ "spat": 67.62886597938139,
184
+ "rec": 68.85856079404473,
185
+ "know": 70.26490066225163,
186
+ "gen": 72.23048327137552,
187
+ "seq": 70.55555555555556
188
+ },
189
+ "capability_detail_scores": {
190
+ "math_ocr": 89.0909090909091,
191
+ "math_spat_ocr": 86.66666666666667,
192
+ "spat_math_rec_ocr": 100.0,
193
+ "spat_rec": 57.407407407407405,
194
+ "spat_ocr": 82.6923076923077,
195
+ "spat_rec_ocr": 56.666666666666664,
196
+ "know_spat_ocr": 100.0,
197
+ "rec_ocr": 87.5,
198
+ "spat_know_rec": 45.0,
199
+ "ocr": 86.45161290322581,
200
+ "rec": 70.34482758620689,
201
+ "know_rec": 73.07692307692307,
202
+ "know_rec_gen": 71.7525773195876,
203
+ "know_gen_rec_ocr": 77.6923076923077,
204
+ "spat_gen_rec_ocr": 75.71428571428574,
205
+ "gen_spat_ocr": 90.0,
206
+ "spat_ocr_gen_seq_math": 80.0,
207
+ "rec_spat_ocr_seq_math": 100.0,
208
+ "spat_rec_gen": 55.90909090909091,
209
+ "gen_math_spat_ocr": 40.0,
210
+ "spat_seq_rec": 51.42857142857144,
211
+ "spat_seq_rec_ocr": 43.333333333333336,
212
+ "spat_know_rec_gen": 73.33333333333333,
213
+ "rec_gen": 70.29411764705883,
214
+ "spat_know_rec_ocr": 0.0,
215
+ "know_ocr_rec_gen": 77.6923076923077,
216
+ "rec_spat_ocr_gen_know": 75.0,
217
+ "math_rec_ocr": 100.0,
218
+ "gen_rec_ocr": 83.99999999999999,
219
+ "seq_gen_rec_ocr": 82.85714285714285,
220
+ "gen_ocr": 80.0,
221
+ "seq_rec_gen": 68.57142857142858,
222
+ "seq_rec": 68.33333333333333,
223
+ "spat_seq_rec_gen": 80.0,
224
+ "seq_know_rec": 100.0,
225
+ "seq_know_rec_gen": 85.00000000000001,
226
+ "rec_spat_ocr_gen_seq": 40.0,
227
+ "rec_know_ocr_gen_seq": 100.0,
228
+ "know_math_rec": 100.0,
229
+ "seq_rec_ocr": 100.0
230
+ },
231
+ "acc_stderr": 0,
232
+ "acc": 72.1893
233
+ },
234
+ "MathVerse": {
235
+ "reject_info": {
236
+ "reject_rate": 28.32,
237
+ "reject_number": 1116,
238
+ "total_question": 3940
239
+ },
240
+ "Text Dominant": {
241
+ "accuracy": 58.4,
242
+ "correct": 351,
243
+ "total": 601
244
+ },
245
+ "Total": {
246
+ "accuracy": 50.57,
247
+ "correct": 1428,
248
+ "total": 2824
249
+ },
250
+ "Vision Intensive": {
251
+ "accuracy": 49.06,
252
+ "correct": 286,
253
+ "total": 583
254
+ },
255
+ "Text Lite": {
256
+ "accuracy": 54.16,
257
+ "correct": 319,
258
+ "total": 589
259
+ },
260
+ "Vision Dominant": {
261
+ "accuracy": 48.35,
262
+ "correct": 278,
263
+ "total": 575
264
+ },
265
+ "Vision Only": {
266
+ "accuracy": 40.76,
267
+ "correct": 194,
268
+ "total": 476
269
+ },
270
+ "accuracy": 50.57,
271
+ "acc_stderr": 0,
272
+ "acc": 50.57
273
+ },
274
+ "Ocrlite": {
275
+ "reject_info": {
276
+ "reject_rate": 2.55,
277
+ "reject_number": 42,
278
+ "total_question": 1644
279
+ },
280
+ "final_score": [
281
+ 1326,
282
+ 1602
283
+ ],
284
+ "accuracy": 82.772,
285
+ "Key Information Extraction-Bookshelf": [
286
+ 42,
287
+ 49,
288
+ 0.857,
289
+ {
290
+ "Default": [
291
+ 42,
292
+ 49,
293
+ 0.857
294
+ ]
295
+ }
296
+ ],
297
+ "Scene Text-centric VQA-diet_constraints": [
298
+ 77,
299
+ 89,
300
+ 0.865,
301
+ {
302
+ "Default": [
303
+ 77,
304
+ 89,
305
+ 0.865
306
+ ]
307
+ }
308
+ ],
309
+ "Doc-oriented VQA-Control": [
310
+ 146,
311
+ 186,
312
+ 0.785,
313
+ {
314
+ "Default": [
315
+ 146,
316
+ 186,
317
+ 0.785
318
+ ]
319
+ }
320
+ ],
321
+ "Doc-oriented VQA": [
322
+ 169,
323
+ 199,
324
+ 0.849,
325
+ {
326
+ "Default": [
327
+ 169,
328
+ 199,
329
+ 0.849
330
+ ]
331
+ }
332
+ ],
333
+ "Scene Text-centric VQA-Fake_logo": [
334
+ 82,
335
+ 119,
336
+ 0.689,
337
+ {
338
+ "Default": [
339
+ 82,
340
+ 119,
341
+ 0.689
342
+ ]
343
+ }
344
+ ],
345
+ "Handwritten Mathematical Expression Recognition": [
346
+ 1,
347
+ 73,
348
+ 0.014,
349
+ {
350
+ "Default": [
351
+ 1,
352
+ 73,
353
+ 0.014
354
+ ]
355
+ }
356
+ ],
357
+ "Key Information Extraction": [
358
+ 186,
359
+ 206,
360
+ 0.903,
361
+ {
362
+ "Default": [
363
+ 186,
364
+ 206,
365
+ 0.903
366
+ ]
367
+ }
368
+ ],
369
+ "Scene Text-centric VQA-Control": [
370
+ 181,
371
+ 200,
372
+ 0.905,
373
+ {
374
+ "Default": [
375
+ 181,
376
+ 200,
377
+ 0.905
378
+ ]
379
+ }
380
+ ],
381
+ "Scene Text-centric VQA": [
382
+ 258,
383
+ 281,
384
+ 0.918,
385
+ {
386
+ "Default": [
387
+ 258,
388
+ 281,
389
+ 0.918
390
+ ]
391
+ }
392
+ ],
393
+ "Artistic Text Recognition": [
394
+ 43,
395
+ 50,
396
+ 0.86,
397
+ {
398
+ "Default": [
399
+ 43,
400
+ 50,
401
+ 0.86
402
+ ]
403
+ }
404
+ ],
405
+ "Irregular Text Recognition": [
406
+ 45,
407
+ 50,
408
+ 0.9,
409
+ {
410
+ "Default": [
411
+ 45,
412
+ 50,
413
+ 0.9
414
+ ]
415
+ }
416
+ ],
417
+ "Non-Semantic Text Recognition": [
418
+ 46,
419
+ 50,
420
+ 0.92,
421
+ {
422
+ "Default": [
423
+ 46,
424
+ 50,
425
+ 0.92
426
+ ]
427
+ }
428
+ ],
429
+ "Regular Text Recognition": [
430
+ 50,
431
+ 50,
432
+ 1.0,
433
+ {
434
+ "Default": [
435
+ 50,
436
+ 50,
437
+ 1.0
438
+ ]
439
+ }
440
+ ],
441
+ "acc_stderr": 0,
442
+ "acc": 82.772
443
+ },
444
+ "OcrliteZh": {
445
+ "reject_info": {
446
+ "reject_rate": 2.56,
447
+ "reject_number": 6,
448
+ "total_question": 234
449
+ },
450
+ "final_score": [
451
+ 168,
452
+ 228
453
+ ],
454
+ "accuracy": 73.684,
455
+ "Docvqa": [
456
+ 8,
457
+ 9,
458
+ 0.889,
459
+ {
460
+ "Default": [
461
+ 8,
462
+ 9,
463
+ 0.889
464
+ ]
465
+ }
466
+ ],
467
+ "Chartqa-human": [
468
+ 6,
469
+ 9,
470
+ 0.667,
471
+ {
472
+ "Default": [
473
+ 6,
474
+ 9,
475
+ 0.667
476
+ ]
477
+ }
478
+ ],
479
+ "Chartqa-au": [
480
+ 8,
481
+ 9,
482
+ 0.889,
483
+ {
484
+ "Default": [
485
+ 8,
486
+ 9,
487
+ 0.889
488
+ ]
489
+ }
490
+ ],
491
+ "infographic": [
492
+ 6,
493
+ 10,
494
+ 0.6,
495
+ {
496
+ "Default": [
497
+ 6,
498
+ 10,
499
+ 0.6
500
+ ]
501
+ }
502
+ ],
503
+ "Key Information Extraction": [
504
+ 43,
505
+ 44,
506
+ 0.977,
507
+ {
508
+ "Default": [
509
+ 43,
510
+ 44,
511
+ 0.977
512
+ ]
513
+ }
514
+ ],
515
+ "Scene Text-centric VQA": [
516
+ 32,
517
+ 39,
518
+ 0.821,
519
+ {
520
+ "Default": [
521
+ 32,
522
+ 39,
523
+ 0.821
524
+ ]
525
+ }
526
+ ],
527
+ "Artistic Text Recognition": [
528
+ 5,
529
+ 11,
530
+ 0.455,
531
+ {
532
+ "Default": [
533
+ 5,
534
+ 11,
535
+ 0.455
536
+ ]
537
+ }
538
+ ],
539
+ "IrRegular Text Recognition": [
540
+ 6,
541
+ 11,
542
+ 0.545,
543
+ {
544
+ "Default": [
545
+ 6,
546
+ 11,
547
+ 0.545
548
+ ]
549
+ }
550
+ ],
551
+ "Non-semantic Text Recognition": [
552
+ 8,
553
+ 12,
554
+ 0.667,
555
+ {
556
+ "Default": [
557
+ 8,
558
+ 12,
559
+ 0.667
560
+ ]
561
+ }
562
+ ],
563
+ "Regular Text Recognition": [
564
+ 9,
565
+ 11,
566
+ 0.818,
567
+ {
568
+ "Default": [
569
+ 9,
570
+ 11,
571
+ 0.818
572
+ ]
573
+ }
574
+ ],
575
+ "Handwriting_CN": [
576
+ 12,
577
+ 20,
578
+ 0.6,
579
+ {
580
+ "Default": [
581
+ 12,
582
+ 20,
583
+ 0.6
584
+ ]
585
+ }
586
+ ],
587
+ "Chinese Unlimited": [
588
+ 25,
589
+ 43,
590
+ 0.581,
591
+ {
592
+ "Default": [
593
+ 25,
594
+ 43,
595
+ 0.581
596
+ ]
597
+ }
598
+ ],
599
+ "acc_stderr": 0,
600
+ "acc": 73.684
601
+ },
602
+ "CharXiv": {
603
+ "descriptive": {
604
+ "Overall Score": 83.12,
605
+ "By Question": {
606
+ "Q1": 81.97,
607
+ "Q2": 82.17,
608
+ "Q3": 75.54,
609
+ "Q4": 88.72,
610
+ "Q5": 85.36,
611
+ "Q6": 80.32,
612
+ "Q7": 77.35,
613
+ "Q8": 87.95,
614
+ "Q9": 82.09,
615
+ "Q10": 86.99,
616
+ "Q11": 69.71,
617
+ "Q12": 84.62,
618
+ "Q13": 85.39,
619
+ "Q14": 88.3,
620
+ "Q15": 92.01,
621
+ "Q16": 86.11,
622
+ "Q17": 58.93,
623
+ "Q18": 94.74,
624
+ "Q19": 93.85
625
+ },
626
+ "By Category": {
627
+ "Information Extraction": 81.73,
628
+ "Enumeration": 87.65,
629
+ "Pattern Recognition": 84.5,
630
+ "Counting": 87.02,
631
+ "Compositionality": 58.93
632
+ },
633
+ "By Subplot": {
634
+ "1 Subplot": 86.46,
635
+ "2-4 Subplots": 83.8,
636
+ "5+ Subplots": 76.59
637
+ },
638
+ "By Subject": {
639
+ "Computer Science": 84.13,
640
+ "Economics": 85.87,
641
+ "Electrical Engineering and Systems Science": 88.03,
642
+ "Mathematics": 85.74,
643
+ "Physics": 79.53,
644
+ "Quantitative Biology": 76.79,
645
+ "Quantitative Finance": 80.82,
646
+ "Statistics": 83.85
647
+ },
648
+ "By Year": {
649
+ "2020": 82.09,
650
+ "2021": 81.51,
651
+ "2022": 84.94,
652
+ "2023": 84.07
653
+ },
654
+ "N_valid": 4000,
655
+ "N_invalid": 0,
656
+ "Question Type": "Descriptive"
657
+ },
658
+ "reasoning": {
659
+ "Overall Score": 51.0,
660
+ "By Answer Type": {
661
+ "Text-in-Chart": 54.55,
662
+ "Text-in-General": 43.43,
663
+ "Number-in-Chart": 56.9,
664
+ "Number-in-General": 41.48
665
+ },
666
+ "By Source": {
667
+ "GPT-Sourced": 59.24,
668
+ "GPT-Inspired": 48.61,
669
+ "Completely Human": 49.33
670
+ },
671
+ "By Subject": {
672
+ "Computer Science": 51.59,
673
+ "Economics": 44.2,
674
+ "Electrical Engineering and Systems Science": 52.94,
675
+ "Mathematics": 58.52,
676
+ "Physics": 54.33,
677
+ "Quantitative Biology": 51.59,
678
+ "Quantitative Finance": 47.41,
679
+ "Statistics": 46.9
680
+ },
681
+ "By Year": {
682
+ "2020": 48.58,
683
+ "2021": 50.57,
684
+ "2022": 52.46,
685
+ "2023": 52.42
686
+ },
687
+ "By Subplot": {
688
+ "1 Subplot": 55.7,
689
+ "2-4 Subplots": 45.24,
690
+ "5+ Subplots": 52.54
691
+ },
692
+ "N_valid": 1000,
693
+ "N_invalid": 0,
694
+ "Question Type": "Reasoning"
695
+ },
696
+ "accuracy": 67.06,
697
+ "acc_stderr": 0,
698
+ "acc": 67.06
699
+ },
700
+ "MathVision": {
701
+ "reject_info": {
702
+ "reject_rate": 0.07,
703
+ "reject_number": 2,
704
+ "total_question": 3040
705
+ },
706
+ "accuracy": 52.86,
707
+ "acc_stderr": 0,
708
+ "acc": 52.86
709
+ },
710
+ "CII-Bench": {
711
+ "reject_info": {
712
+ "reject_rate": 0.39,
713
+ "reject_number": 3,
714
+ "total_question": 765
715
+ },
716
+ "accuracy": 67.72,
717
+ "domain_score": {
718
+ "Life": 70.56,
719
+ "Art": 70.37,
720
+ "CTC": 57.46,
721
+ "Society": 69.02,
722
+ "Env.": 66.67,
723
+ "Politics": 75.0
724
+ },
725
+ "emotion_score": {
726
+ "Neutral": 71.05,
727
+ "Negative": 65.78,
728
+ "Positive": 66.09
729
+ },
730
+ "acc_stderr": 0,
731
+ "acc": 67.72
732
+ },
733
+ "Blink": {
734
+ "reject_info": {
735
+ "reject_rate": 0.16,
736
+ "reject_number": 3,
737
+ "total_question": 1901
738
+ },
739
+ "accuracy": 64.7,
740
+ "Art Style": 87.18,
741
+ "Counting": 70.0,
742
+ "Forensic Detection": 78.79,
743
+ "Functional Correspondence": 53.08,
744
+ "IQ Test": 21.33,
745
+ "Jigsaw": 57.72,
746
+ "Multi-view Reasoning": 59.4,
747
+ "Object Localization": 60.66,
748
+ "Relative Depth": 84.68,
749
+ "Relative Reflectance": 33.83,
750
+ "Semantic Correspondence": 63.31,
751
+ "Spatial Relation": 74.65,
752
+ "Visual Correspondence": 83.72,
753
+ "Visual Similarity": 81.48,
754
+ "acc_stderr": 0,
755
+ "acc": 64.7
756
+ }
757
+ }
758
+ }
Gemini-2.5-pro-preview-20250325/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,757 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Gemini-2.5-pro-preview-20250325",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "reject_info": {
10
+ "reject_rate": 1.11,
11
+ "reject_number": 10,
12
+ "total_question": 900
13
+ },
14
+ "艺术与设计": {
15
+ "num": 88,
16
+ "correct": 72,
17
+ "accuracy": 81.82
18
+ },
19
+ "overall": {
20
+ "num": 890,
21
+ "correct": 634,
22
+ "accuracy": 71.24
23
+ },
24
+ "商业": {
25
+ "num": 125,
26
+ "correct": 84,
27
+ "accuracy": 67.2
28
+ },
29
+ "科学": {
30
+ "num": 200,
31
+ "correct": 132,
32
+ "accuracy": 66.0
33
+ },
34
+ "健康与医学": {
35
+ "num": 153,
36
+ "correct": 118,
37
+ "accuracy": 77.12
38
+ },
39
+ "人文社会科学": {
40
+ "num": 85,
41
+ "correct": 67,
42
+ "accuracy": 78.82
43
+ },
44
+ "技术与工程": {
45
+ "num": 239,
46
+ "correct": 161,
47
+ "accuracy": 67.36
48
+ },
49
+ "accuracy": 71.24,
50
+ "acc_stderr": 0,
51
+ "acc": 71.24
52
+ },
53
+ "MMMU": {
54
+ "reject_info": {
55
+ "reject_rate": 10.89,
56
+ "reject_number": 98,
57
+ "total_question": 900
58
+ },
59
+ "accuracy": 69.58,
60
+ "subject_score": {
61
+ "Accounting": 61.54,
62
+ "Agriculture": 86.67,
63
+ "Architecture": 63.64,
64
+ "Art": 95.0,
65
+ "Basic": 86.67,
66
+ "Biology": 67.86,
67
+ "Chemistry": 66.67,
68
+ "Clinical": 63.33,
69
+ "Computer": 50.0,
70
+ "Design": 90.0,
71
+ "Diagnostics": 56.67,
72
+ "Economics": 70.0,
73
+ "Electronics": 33.33,
74
+ "Energy": 50.0,
75
+ "Finance": 75.0,
76
+ "Geography": 50.0,
77
+ "History": 80.0,
78
+ "Literature": 90.0,
79
+ "Manage": 70.0,
80
+ "Marketing": 62.96,
81
+ "Materials": 38.1,
82
+ "Math": 57.89,
83
+ "Mechanical": 50.0,
84
+ "Music": 34.78,
85
+ "Pharmacy": 83.33,
86
+ "Physics": 68.97,
87
+ "Psychology": 80.0,
88
+ "Public": 70.0,
89
+ "Sociology": 83.33
90
+ },
91
+ "difficulty_score": {
92
+ "Medium": 69.29,
93
+ "Easy": 81.21,
94
+ "Hard": 46.76
95
+ },
96
+ "acc_stderr": 0,
97
+ "acc": 69.58
98
+ },
99
+ "MMMU_Pro_standard": {
100
+ "reject_info": {
101
+ "reject_rate": 1.33,
102
+ "reject_number": 23,
103
+ "total_question": 1730
104
+ },
105
+ "accuracy": 56.82,
106
+ "subject_score": {
107
+ "History": 60.71,
108
+ "Design": 76.67,
109
+ "Literature": 82.69,
110
+ "Finance": 71.19,
111
+ "Accounting": 63.16,
112
+ "Energy_and_Power": 50.0,
113
+ "Pharmacy": 49.12,
114
+ "Architecture_and_Engineering": 49.06,
115
+ "Sociology": 61.11,
116
+ "Clinical_Medicine": 61.02,
117
+ "Art": 79.25,
118
+ "Public_Health": 43.1,
119
+ "Physics": 48.33,
120
+ "Art_Theory": 78.18,
121
+ "Electronics": 64.29,
122
+ "Psychology": 53.33,
123
+ "Biology": 61.02,
124
+ "Economics": 54.24,
125
+ "Mechanical_Engineering": 50.91,
126
+ "Diagnostics_and_Laboratory_Medicine": 45.0,
127
+ "Basic_Medical_Science": 71.15,
128
+ "Agriculture": 63.33,
129
+ "Computer_Science": 48.33,
130
+ "Math": 58.93,
131
+ "Music": 25.0,
132
+ "Materials": 48.28,
133
+ "Marketing": 49.15,
134
+ "Chemistry": 55.0,
135
+ "Geography": 57.69,
136
+ "Manage": 30.0
137
+ },
138
+ "difficulty_score": {
139
+ "Medium": 54.75,
140
+ "Hard": 50.0,
141
+ "Easy": 64.96
142
+ },
143
+ "acc_stderr": 0,
144
+ "acc": 56.82
145
+ },
146
+ "MMMU_Pro_vision": {
147
+ "reject_info": {
148
+ "reject_rate": 2.08,
149
+ "reject_number": 36,
150
+ "total_question": 1730
151
+ },
152
+ "accuracy": 65.82,
153
+ "subject_score": {
154
+ "Art": 69.81,
155
+ "History": 66.07,
156
+ "Design": 65.0,
157
+ "Literature": 69.23,
158
+ "Agriculture": 51.67,
159
+ "Sociology": 62.96,
160
+ "Pharmacy": 73.68,
161
+ "Finance": 88.14,
162
+ "Energy_and_Power": 71.93,
163
+ "Accounting": 84.48,
164
+ "Clinical_Medicine": 52.54,
165
+ "Public_Health": 72.41,
166
+ "Architecture_and_Engineering": 61.54,
167
+ "Art_Theory": 78.18,
168
+ "Physics": 62.71,
169
+ "Psychology": 51.67,
170
+ "Electronics": 83.33,
171
+ "Manage": 58.0,
172
+ "Biology": 52.54,
173
+ "Diagnostics_and_Laboratory_Medicine": 40.0,
174
+ "Economics": 77.97,
175
+ "Mechanical_Engineering": 62.96,
176
+ "Basic_Medical_Science": 61.54,
177
+ "Computer_Science": 70.69,
178
+ "Math": 71.93,
179
+ "Music": 36.67,
180
+ "Materials": 56.9,
181
+ "Marketing": 77.59,
182
+ "Chemistry": 76.67,
183
+ "Geography": 71.15
184
+ },
185
+ "acc_stderr": 0,
186
+ "acc": 65.82
187
+ },
188
+ "MmvetV2": {
189
+ "accuracy": 77.6596,
190
+ "capability_scores": {
191
+ "math": 88.52941176470588,
192
+ "ocr": 81.39423076923083,
193
+ "spat": 73.09644670050761,
194
+ "rec": 75.29126213592241,
195
+ "know": 77.11538461538461,
196
+ "gen": 76.509090909091,
197
+ "seq": 79.82142857142856
198
+ },
199
+ "capability_detail_scores": {
200
+ "math_ocr": 98.18181818181819,
201
+ "math_spat_ocr": 92.66666666666666,
202
+ "ocr_math_spat_rec": 50.0,
203
+ "spat_rec": 65.35714285714286,
204
+ "spat_ocr": 88.07692307692308,
205
+ "ocr_spat_rec": 79.16666666666666,
206
+ "spat_know_ocr": 100.0,
207
+ "ocr_rec": 95.0,
208
+ "spat_know_rec": 69.0,
209
+ "ocr": 82.5,
210
+ "rec": 77.79661016949152,
211
+ "know_rec": 80.76923076923077,
212
+ "gen_know_rec": 78.1,
213
+ "ocr_gen_know_rec": 82.30769230769229,
214
+ "ocr_spat_gen_rec": 77.20930232558142,
215
+ "spat_gen_ocr": 95.0,
216
+ "math_spat_seq_gen_ocr": 100.0,
217
+ "math_spat_seq_ocr_rec": 0.0,
218
+ "spat_gen_rec": 56.81818181818182,
219
+ "math_spat_gen_ocr": 40.0,
220
+ "seq_spat_rec": 68.57142857142857,
221
+ "ocr_seq_spat_rec": 46.666666666666664,
222
+ "gen_spat_know_rec": 63.33333333333333,
223
+ "gen_rec": 74.41176470588235,
224
+ "ocr_spat_know_rec": 0.0,
225
+ "spat_rec_gen_ocr_know": 75.0,
226
+ "ocr_math_rec": 100.0,
227
+ "ocr_gen_rec": 88.00000000000001,
228
+ "ocr_seq_gen_rec": 84.28571428571429,
229
+ "gen_ocr": 76.92307692307693,
230
+ "seq_gen_rec": 78.57142857142857,
231
+ "seq_rec": 100.0,
232
+ "seq_spat_gen_rec": 78.75,
233
+ "seq_know_rec": 100.0,
234
+ "gen_know_seq_rec": 85.00000000000001,
235
+ "spat_seq_gen_ocr_rec": 86.66666666666667,
236
+ "rec_seq_gen_ocr_know": 100.0,
237
+ "math_know_rec": 100.0,
238
+ "ocr_seq_rec": 100.0
239
+ },
240
+ "acc_stderr": 0,
241
+ "acc": 77.6596
242
+ },
243
+ "MathVerse": {
244
+ "reject_info": {
245
+ "reject_rate": 1.5,
246
+ "reject_number": 59,
247
+ "total_question": 3940
248
+ },
249
+ "Text Dominant": {
250
+ "accuracy": 63.01,
251
+ "correct": 494,
252
+ "total": 784
253
+ },
254
+ "Total": {
255
+ "accuracy": 60.27,
256
+ "correct": 2339,
257
+ "total": 3881
258
+ },
259
+ "Vision Dominant": {
260
+ "accuracy": 59.56,
261
+ "correct": 461,
262
+ "total": 774
263
+ },
264
+ "Vision Intensive": {
265
+ "accuracy": 62.18,
266
+ "correct": 480,
267
+ "total": 772
268
+ },
269
+ "Text Lite": {
270
+ "accuracy": 61.78,
271
+ "correct": 480,
272
+ "total": 777
273
+ },
274
+ "Vision Only": {
275
+ "accuracy": 54.78,
276
+ "correct": 424,
277
+ "total": 774
278
+ },
279
+ "accuracy": 60.27,
280
+ "acc_stderr": 0,
281
+ "acc": 60.27
282
+ },
283
+ "Ocrlite": {
284
+ "final_score": [
285
+ 1373,
286
+ 1644
287
+ ],
288
+ "accuracy": 83.516,
289
+ "Key Information Extraction-Bookshelf": [
290
+ 49,
291
+ 51,
292
+ 0.961,
293
+ {
294
+ "Default": [
295
+ 49,
296
+ 51,
297
+ 0.961
298
+ ]
299
+ }
300
+ ],
301
+ "Scene Text-centric VQA-diet_constraints": [
302
+ 68,
303
+ 90,
304
+ 0.756,
305
+ {
306
+ "Default": [
307
+ 68,
308
+ 90,
309
+ 0.756
310
+ ]
311
+ }
312
+ ],
313
+ "Doc-oriented VQA-Control": [
314
+ 165,
315
+ 189,
316
+ 0.873,
317
+ {
318
+ "Default": [
319
+ 165,
320
+ 189,
321
+ 0.873
322
+ ]
323
+ }
324
+ ],
325
+ "Doc-oriented VQA": [
326
+ 188,
327
+ 204,
328
+ 0.922,
329
+ {
330
+ "Default": [
331
+ 188,
332
+ 204,
333
+ 0.922
334
+ ]
335
+ }
336
+ ],
337
+ "Scene Text-centric VQA-Fake_logo": [
338
+ 94,
339
+ 119,
340
+ 0.79,
341
+ {
342
+ "Default": [
343
+ 94,
344
+ 119,
345
+ 0.79
346
+ ]
347
+ }
348
+ ],
349
+ "Handwritten Mathematical Expression Recognition": [
350
+ 1,
351
+ 100,
352
+ 0.01,
353
+ {
354
+ "Default": [
355
+ 1,
356
+ 100,
357
+ 0.01
358
+ ]
359
+ }
360
+ ],
361
+ "Key Information Extraction": [
362
+ 190,
363
+ 209,
364
+ 0.909,
365
+ {
366
+ "Default": [
367
+ 190,
368
+ 209,
369
+ 0.909
370
+ ]
371
+ }
372
+ ],
373
+ "Scene Text-centric VQA-Control": [
374
+ 182,
375
+ 200,
376
+ 0.91,
377
+ {
378
+ "Default": [
379
+ 182,
380
+ 200,
381
+ 0.91
382
+ ]
383
+ }
384
+ ],
385
+ "Scene Text-centric VQA": [
386
+ 257,
387
+ 282,
388
+ 0.911,
389
+ {
390
+ "Default": [
391
+ 257,
392
+ 282,
393
+ 0.911
394
+ ]
395
+ }
396
+ ],
397
+ "Artistic Text Recognition": [
398
+ 40,
399
+ 50,
400
+ 0.8,
401
+ {
402
+ "Default": [
403
+ 40,
404
+ 50,
405
+ 0.8
406
+ ]
407
+ }
408
+ ],
409
+ "Irregular Text Recognition": [
410
+ 44,
411
+ 50,
412
+ 0.88,
413
+ {
414
+ "Default": [
415
+ 44,
416
+ 50,
417
+ 0.88
418
+ ]
419
+ }
420
+ ],
421
+ "Non-Semantic Text Recognition": [
422
+ 45,
423
+ 50,
424
+ 0.9,
425
+ {
426
+ "Default": [
427
+ 45,
428
+ 50,
429
+ 0.9
430
+ ]
431
+ }
432
+ ],
433
+ "Regular Text Recognition": [
434
+ 50,
435
+ 50,
436
+ 1.0,
437
+ {
438
+ "Default": [
439
+ 50,
440
+ 50,
441
+ 1.0
442
+ ]
443
+ }
444
+ ],
445
+ "acc_stderr": 0,
446
+ "acc": 83.516
447
+ },
448
+ "OcrliteZh": {
449
+ "final_score": [
450
+ 178,
451
+ 234
452
+ ],
453
+ "accuracy": 76.068,
454
+ "Docvqa": [
455
+ 9,
456
+ 10,
457
+ 0.9,
458
+ {
459
+ "Default": [
460
+ 9,
461
+ 10,
462
+ 0.9
463
+ ]
464
+ }
465
+ ],
466
+ "Chartqa-human": [
467
+ 8,
468
+ 10,
469
+ 0.8,
470
+ {
471
+ "Default": [
472
+ 8,
473
+ 10,
474
+ 0.8
475
+ ]
476
+ }
477
+ ],
478
+ "Chartqa-au": [
479
+ 9,
480
+ 10,
481
+ 0.9,
482
+ {
483
+ "Default": [
484
+ 9,
485
+ 10,
486
+ 0.9
487
+ ]
488
+ }
489
+ ],
490
+ "infographic": [
491
+ 9,
492
+ 10,
493
+ 0.9,
494
+ {
495
+ "Default": [
496
+ 9,
497
+ 10,
498
+ 0.9
499
+ ]
500
+ }
501
+ ],
502
+ "Key Information Extraction": [
503
+ 43,
504
+ 45,
505
+ 0.956,
506
+ {
507
+ "Default": [
508
+ 43,
509
+ 45,
510
+ 0.956
511
+ ]
512
+ }
513
+ ],
514
+ "Scene Text-centric VQA": [
515
+ 34,
516
+ 40,
517
+ 0.85,
518
+ {
519
+ "Default": [
520
+ 34,
521
+ 40,
522
+ 0.85
523
+ ]
524
+ }
525
+ ],
526
+ "Artistic Text Recognition": [
527
+ 4,
528
+ 11,
529
+ 0.364,
530
+ {
531
+ "Default": [
532
+ 4,
533
+ 11,
534
+ 0.364
535
+ ]
536
+ }
537
+ ],
538
+ "IrRegular Text Recognition": [
539
+ 4,
540
+ 11,
541
+ 0.364,
542
+ {
543
+ "Default": [
544
+ 4,
545
+ 11,
546
+ 0.364
547
+ ]
548
+ }
549
+ ],
550
+ "Non-semantic Text Recognition": [
551
+ 10,
552
+ 12,
553
+ 0.833,
554
+ {
555
+ "Default": [
556
+ 10,
557
+ 12,
558
+ 0.833
559
+ ]
560
+ }
561
+ ],
562
+ "Regular Text Recognition": [
563
+ 9,
564
+ 11,
565
+ 0.818,
566
+ {
567
+ "Default": [
568
+ 9,
569
+ 11,
570
+ 0.818
571
+ ]
572
+ }
573
+ ],
574
+ "Handwriting_CN": [
575
+ 11,
576
+ 20,
577
+ 0.55,
578
+ {
579
+ "Default": [
580
+ 11,
581
+ 20,
582
+ 0.55
583
+ ]
584
+ }
585
+ ],
586
+ "Chinese Unlimited": [
587
+ 28,
588
+ 44,
589
+ 0.636,
590
+ {
591
+ "Default": [
592
+ 28,
593
+ 44,
594
+ 0.636
595
+ ]
596
+ }
597
+ ],
598
+ "acc_stderr": 0,
599
+ "acc": 76.068
600
+ },
601
+ "CharXiv": {
602
+ "reject_info": {
603
+ "reject_rate": 9.52,
604
+ "reject_number": 476,
605
+ "total_question": 5000
606
+ },
607
+ "descriptive": {
608
+ "Overall Score": 90.31,
609
+ "By Question": {
610
+ "Q1": 90.61,
611
+ "Q2": 87.13,
612
+ "Q3": 76.1,
613
+ "Q4": 90.95,
614
+ "Q5": 92.45,
615
+ "Q6": 89.15,
616
+ "Q7": 90.48,
617
+ "Q8": 98.08,
618
+ "Q9": 94.19,
619
+ "Q10": 95.38,
620
+ "Q11": 79.74,
621
+ "Q12": 92.22,
622
+ "Q13": 79.38,
623
+ "Q14": 97.2,
624
+ "Q15": 98.2,
625
+ "Q16": 79.31,
626
+ "Q17": 84.08,
627
+ "Q18": 95.02,
628
+ "Q19": 94.12
629
+ },
630
+ "By Category": {
631
+ "Information Extraction": 88.2,
632
+ "Enumeration": 94.01,
633
+ "Pattern Recognition": 88.09,
634
+ "Counting": 93.68,
635
+ "Compositionality": 84.08
636
+ },
637
+ "By Subplot": {
638
+ "1 Subplot": 91.87,
639
+ "2-4 Subplots": 90.11,
640
+ "5+ Subplots": 88.3
641
+ },
642
+ "By Subject": {
643
+ "Computer Science": 92.46,
644
+ "Economics": 92.03,
645
+ "Electrical Engineering and Systems Science": 94.35,
646
+ "Mathematics": 91.45,
647
+ "Physics": 87.2,
648
+ "Quantitative Biology": 88.29,
649
+ "Quantitative Finance": 88.36,
650
+ "Statistics": 91.37
651
+ },
652
+ "By Year": {
653
+ "2020": 90.75,
654
+ "2021": 89.04,
655
+ "2022": 90.57,
656
+ "2023": 90.92
657
+ },
658
+ "N_valid": 3529,
659
+ "N_invalid": 0,
660
+ "Question Type": "Descriptive"
661
+ },
662
+ "reasoning": {
663
+ "Overall Score": 64.92,
664
+ "By Answer Type": {
665
+ "Text-in-Chart": 71.14,
666
+ "Text-in-General": 58.16,
667
+ "Number-in-Chart": 59.74,
668
+ "Number-in-General": 61.06
669
+ },
670
+ "By Source": {
671
+ "GPT-Sourced": 66.12,
672
+ "GPT-Inspired": 64.81,
673
+ "Completely Human": 64.6
674
+ },
675
+ "By Subject": {
676
+ "Computer Science": 61.6,
677
+ "Economics": 63.77,
678
+ "Electrical Engineering and Systems Science": 65.25,
679
+ "Mathematics": 63.7,
680
+ "Physics": 66.14,
681
+ "Quantitative Biology": 69.84,
682
+ "Quantitative Finance": 61.74,
683
+ "Statistics": 67.57
684
+ },
685
+ "By Year": {
686
+ "2020": 63.37,
687
+ "2021": 64.37,
688
+ "2022": 62.7,
689
+ "2023": 69.23
690
+ },
691
+ "By Subplot": {
692
+ "1 Subplot": 67.1,
693
+ "2-4 Subplots": 61.27,
694
+ "5+ Subplots": 67.23
695
+ },
696
+ "N_valid": 995,
697
+ "N_invalid": 0,
698
+ "Question Type": "Reasoning"
699
+ },
700
+ "accuracy": 77.62,
701
+ "acc_stderr": 0,
702
+ "acc": 77.62
703
+ },
704
+ "MathVision": {
705
+ "reject_info": {
706
+ "reject_rate": 20.3,
707
+ "reject_number": 617,
708
+ "total_question": 3040
709
+ },
710
+ "accuracy": 64.55,
711
+ "acc_stderr": 0,
712
+ "acc": 64.55
713
+ },
714
+ "CII-Bench": {
715
+ "accuracy": 73.46,
716
+ "domain_score": {
717
+ "Art": 70.59,
718
+ "Life": 78.35,
719
+ "CTC": 66.67,
720
+ "Society": 76.22,
721
+ "Env.": 64.81,
722
+ "Politics": 79.17
723
+ },
724
+ "emotion_score": {
725
+ "Negative": 74.72,
726
+ "Neutral": 74.81,
727
+ "Positive": 70.51
728
+ },
729
+ "acc_stderr": 0,
730
+ "acc": 73.46
731
+ },
732
+ "Blink": {
733
+ "reject_info": {
734
+ "reject_rate": 3.73,
735
+ "reject_number": 71,
736
+ "total_question": 1901
737
+ },
738
+ "accuracy": 71.75,
739
+ "Art Style": 86.32,
740
+ "Counting": 80.0,
741
+ "Forensic Detection": 90.15,
742
+ "Functional Correspondence": 70.77,
743
+ "IQ Test": 41.25,
744
+ "Jigsaw": 73.33,
745
+ "Multi-view Reasoning": 36.09,
746
+ "Object Localization": 62.3,
747
+ "Relative Depth": 84.68,
748
+ "Relative Reflectance": 32.84,
749
+ "Semantic Correspondence": 64.03,
750
+ "Spatial Relation": 88.11,
751
+ "Visual Correspondence": 93.57,
752
+ "Visual Similarity": 84.44,
753
+ "acc_stderr": 0,
754
+ "acc": 71.75
755
+ }
756
+ }
757
+ }
Gemma-3-27b-it/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,722 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Gemma-3-27b-it",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 53,
12
+ "accuracy": 60.23
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 374,
17
+ "accuracy": 41.56
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 32,
22
+ "accuracy": 25.4
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 81,
27
+ "accuracy": 39.71
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 72,
32
+ "accuracy": 47.06
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 39,
37
+ "accuracy": 45.88
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 97,
42
+ "accuracy": 39.75
43
+ },
44
+ "accuracy": 41.56,
45
+ "acc_stderr": 0,
46
+ "acc": 41.56
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 53.89,
50
+ "subject_score": {
51
+ "Accounting": 40.0,
52
+ "Agriculture": 50.0,
53
+ "Architecture": 53.33,
54
+ "Art": 71.67,
55
+ "Basic": 63.33,
56
+ "Biology": 33.33,
57
+ "Chemistry": 30.0,
58
+ "Clinical": 60.0,
59
+ "Computer": 46.67,
60
+ "Design": 80.0,
61
+ "Diagnostics": 40.0,
62
+ "Economics": 70.0,
63
+ "Electronics": 30.0,
64
+ "Energy": 50.0,
65
+ "Finance": 33.33,
66
+ "Geography": 40.0,
67
+ "History": 76.67,
68
+ "Literature": 93.33,
69
+ "Manage": 50.0,
70
+ "Marketing": 53.33,
71
+ "Materials": 46.67,
72
+ "Math": 56.67,
73
+ "Mechanical": 43.33,
74
+ "Music": 33.33,
75
+ "Pharmacy": 66.67,
76
+ "Physics": 33.33,
77
+ "Psychology": 73.33,
78
+ "Public": 56.67,
79
+ "Sociology": 70.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 52.59,
83
+ "Easy": 65.08,
84
+ "Hard": 38.67
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 53.89
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 36.07,
91
+ "subject_score": {
92
+ "History": 41.07,
93
+ "Design": 56.67,
94
+ "Literature": 63.46,
95
+ "Sociology": 50.0,
96
+ "Art": 60.38,
97
+ "Agriculture": 33.33,
98
+ "Pharmacy": 36.84,
99
+ "Clinical_Medicine": 32.2,
100
+ "Accounting": 25.86,
101
+ "Public_Health": 18.97,
102
+ "Physics": 31.67,
103
+ "Energy_and_Power": 29.31,
104
+ "Art_Theory": 67.27,
105
+ "Psychology": 30.0,
106
+ "Biology": 25.42,
107
+ "Economics": 33.9,
108
+ "Manage": 32.0,
109
+ "Finance": 33.33,
110
+ "Architecture_and_Engineering": 26.67,
111
+ "Diagnostics_and_Laboratory_Medicine": 21.67,
112
+ "Basic_Medical_Science": 40.38,
113
+ "Mechanical_Engineering": 38.98,
114
+ "Electronics": 45.0,
115
+ "Computer_Science": 30.0,
116
+ "Math": 41.67,
117
+ "Music": 26.67,
118
+ "Marketing": 35.59,
119
+ "Materials": 18.33,
120
+ "Chemistry": 33.33,
121
+ "Geography": 30.77
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 32.46,
125
+ "Hard": 29.68,
126
+ "Easy": 46.4
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 36.07
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 27.98,
133
+ "subject_score": {
134
+ "Art": 28.3,
135
+ "Sociology": 40.74,
136
+ "Agriculture": 25.0,
137
+ "Design": 40.0,
138
+ "History": 32.14,
139
+ "Literature": 61.54,
140
+ "Finance": 36.67,
141
+ "Pharmacy": 24.56,
142
+ "Clinical_Medicine": 23.73,
143
+ "Accounting": 34.48,
144
+ "Physics": 28.33,
145
+ "Public_Health": 29.31,
146
+ "Energy_and_Power": 18.97,
147
+ "Art_Theory": 34.55,
148
+ "Psychology": 31.67,
149
+ "Architecture_and_Engineering": 15.0,
150
+ "Manage": 26.0,
151
+ "Biology": 27.12,
152
+ "Economics": 30.51,
153
+ "Diagnostics_and_Laboratory_Medicine": 16.67,
154
+ "Electronics": 16.67,
155
+ "Mechanical_Engineering": 22.03,
156
+ "Basic_Medical_Science": 28.85,
157
+ "Computer_Science": 30.0,
158
+ "Math": 25.0,
159
+ "Music": 33.33,
160
+ "Marketing": 28.81,
161
+ "Materials": 15.0,
162
+ "Chemistry": 20.0,
163
+ "Geography": 19.23
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 27.98
167
+ },
168
+ "MmvetV2": {
169
+ "reject_info": {
170
+ "reject_rate": 0.19,
171
+ "reject_number": 1,
172
+ "total_question": 517
173
+ },
174
+ "accuracy": 66.2209,
175
+ "capability_scores": {
176
+ "math": 73.52941176470588,
177
+ "ocr": 71.00961538461542,
178
+ "spat": 61.167512690355316,
179
+ "rec": 63.406326034063355,
180
+ "know": 61.025641025641,
181
+ "gen": 67.67272727272737,
182
+ "seq": 61.999999999999986
183
+ },
184
+ "capability_detail_scores": {
185
+ "math_ocr": 68.18181818181817,
186
+ "math_spat_ocr": 90.0,
187
+ "math_spat_rec_ocr": 50.0,
188
+ "spat_rec": 55.714285714285715,
189
+ "spat_ocr": 63.46153846153846,
190
+ "spat_rec_ocr": 33.33333333333333,
191
+ "spat_know_ocr": 87.5,
192
+ "rec_ocr": 70.0,
193
+ "spat_rec_know": 45.0,
194
+ "ocr": 83.75,
195
+ "rec": 72.37288135593221,
196
+ "rec_know": 64.61538461538461,
197
+ "gen_rec_know": 63.49999999999998,
198
+ "gen_rec_know_ocr": 73.84615384615387,
199
+ "gen_spat_rec_ocr": 73.25581395348838,
200
+ "gen_spat_ocr": 85.00000000000001,
201
+ "math_gen_ocr_seq_spat": 100.0,
202
+ "math_ocr_seq_spat_rec": 0.0,
203
+ "gen_spat_rec": 51.36363636363635,
204
+ "math_spat_ocr_gen": 100.0,
205
+ "spat_rec_seq": 28.57142857142857,
206
+ "spat_rec_ocr_seq": 66.66666666666666,
207
+ "gen_spat_rec_know": 56.66666666666668,
208
+ "gen_rec": 73.5294117647059,
209
+ "spat_rec_know_ocr": 0.0,
210
+ "gen_know_ocr_spat_rec": 65.0,
211
+ "math_rec_ocr": 100.0,
212
+ "gen_rec_ocr": 72.0,
213
+ "gen_rec_ocr_seq": 77.14285714285715,
214
+ "gen_ocr": 73.84615384615384,
215
+ "gen_rec_seq": 62.85714285714287,
216
+ "rec_seq": 55.99999999999999,
217
+ "gen_spat_rec_seq": 85.00000000000001,
218
+ "rec_know_seq": 0.0,
219
+ "gen_rec_know_seq": 45.0,
220
+ "gen_ocr_seq_spat_rec": 53.333333333333336,
221
+ "gen_know_ocr_seq_rec": 90.0,
222
+ "math_rec_know": 0.0,
223
+ "rec_ocr_seq": 100.0
224
+ },
225
+ "acc_stderr": 0,
226
+ "acc": 66.2209
227
+ },
228
+ "MathVerse": {
229
+ "Text Lite": {
230
+ "accuracy": 34.9,
231
+ "correct": 275,
232
+ "total": 788
233
+ },
234
+ "Total": {
235
+ "accuracy": 32.49,
236
+ "correct": 1280,
237
+ "total": 3940
238
+ },
239
+ "Vision Dominant": {
240
+ "accuracy": 28.43,
241
+ "correct": 224,
242
+ "total": 788
243
+ },
244
+ "Vision Intensive": {
245
+ "accuracy": 32.36,
246
+ "correct": 255,
247
+ "total": 788
248
+ },
249
+ "Text Dominant": {
250
+ "accuracy": 43.15,
251
+ "correct": 340,
252
+ "total": 788
253
+ },
254
+ "Vision Only": {
255
+ "accuracy": 23.6,
256
+ "correct": 186,
257
+ "total": 788
258
+ },
259
+ "accuracy": 32.49,
260
+ "acc_stderr": 0,
261
+ "acc": 32.49
262
+ },
263
+ "Ocrlite": {
264
+ "final_score": [
265
+ 1129,
266
+ 1644
267
+ ],
268
+ "accuracy": 68.674,
269
+ "Key Information Extraction-Bookshelf": [
270
+ 28,
271
+ 51,
272
+ 0.549,
273
+ {
274
+ "Default": [
275
+ 28,
276
+ 51,
277
+ 0.549
278
+ ]
279
+ }
280
+ ],
281
+ "Scene Text-centric VQA-diet_constraints": [
282
+ 57,
283
+ 90,
284
+ 0.633,
285
+ {
286
+ "Default": [
287
+ 57,
288
+ 90,
289
+ 0.633
290
+ ]
291
+ }
292
+ ],
293
+ "Doc-oriented VQA-Control": [
294
+ 104,
295
+ 189,
296
+ 0.55,
297
+ {
298
+ "Default": [
299
+ 104,
300
+ 189,
301
+ 0.55
302
+ ]
303
+ }
304
+ ],
305
+ "Doc-oriented VQA": [
306
+ 139,
307
+ 204,
308
+ 0.681,
309
+ {
310
+ "Default": [
311
+ 139,
312
+ 204,
313
+ 0.681
314
+ ]
315
+ }
316
+ ],
317
+ "Scene Text-centric VQA-Fake_logo": [
318
+ 54,
319
+ 119,
320
+ 0.454,
321
+ {
322
+ "Default": [
323
+ 54,
324
+ 119,
325
+ 0.454
326
+ ]
327
+ }
328
+ ],
329
+ "Handwritten Mathematical Expression Recognition": [
330
+ 2,
331
+ 100,
332
+ 0.02,
333
+ {
334
+ "Default": [
335
+ 2,
336
+ 100,
337
+ 0.02
338
+ ]
339
+ }
340
+ ],
341
+ "Key Information Extraction": [
342
+ 178,
343
+ 209,
344
+ 0.852,
345
+ {
346
+ "Default": [
347
+ 178,
348
+ 209,
349
+ 0.852
350
+ ]
351
+ }
352
+ ],
353
+ "Scene Text-centric VQA-Control": [
354
+ 171,
355
+ 200,
356
+ 0.855,
357
+ {
358
+ "Default": [
359
+ 171,
360
+ 200,
361
+ 0.855
362
+ ]
363
+ }
364
+ ],
365
+ "Scene Text-centric VQA": [
366
+ 229,
367
+ 282,
368
+ 0.812,
369
+ {
370
+ "Default": [
371
+ 229,
372
+ 282,
373
+ 0.812
374
+ ]
375
+ }
376
+ ],
377
+ "Artistic Text Recognition": [
378
+ 40,
379
+ 50,
380
+ 0.8,
381
+ {
382
+ "Default": [
383
+ 40,
384
+ 50,
385
+ 0.8
386
+ ]
387
+ }
388
+ ],
389
+ "Irregular Text Recognition": [
390
+ 40,
391
+ 50,
392
+ 0.8,
393
+ {
394
+ "Default": [
395
+ 40,
396
+ 50,
397
+ 0.8
398
+ ]
399
+ }
400
+ ],
401
+ "Non-Semantic Text Recognition": [
402
+ 38,
403
+ 50,
404
+ 0.76,
405
+ {
406
+ "Default": [
407
+ 38,
408
+ 50,
409
+ 0.76
410
+ ]
411
+ }
412
+ ],
413
+ "Regular Text Recognition": [
414
+ 49,
415
+ 50,
416
+ 0.98,
417
+ {
418
+ "Default": [
419
+ 49,
420
+ 50,
421
+ 0.98
422
+ ]
423
+ }
424
+ ],
425
+ "acc_stderr": 0,
426
+ "acc": 68.674
427
+ },
428
+ "OcrliteZh": {
429
+ "final_score": [
430
+ 97,
431
+ 234
432
+ ],
433
+ "accuracy": 41.453,
434
+ "Docvqa": [
435
+ 3,
436
+ 10,
437
+ 0.3,
438
+ {
439
+ "Default": [
440
+ 3,
441
+ 10,
442
+ 0.3
443
+ ]
444
+ }
445
+ ],
446
+ "Chartqa-human": [
447
+ 5,
448
+ 10,
449
+ 0.5,
450
+ {
451
+ "Default": [
452
+ 5,
453
+ 10,
454
+ 0.5
455
+ ]
456
+ }
457
+ ],
458
+ "Chartqa-au": [
459
+ 3,
460
+ 10,
461
+ 0.3,
462
+ {
463
+ "Default": [
464
+ 3,
465
+ 10,
466
+ 0.3
467
+ ]
468
+ }
469
+ ],
470
+ "infographic": [
471
+ 4,
472
+ 10,
473
+ 0.4,
474
+ {
475
+ "Default": [
476
+ 4,
477
+ 10,
478
+ 0.4
479
+ ]
480
+ }
481
+ ],
482
+ "Key Information Extraction": [
483
+ 29,
484
+ 45,
485
+ 0.644,
486
+ {
487
+ "Default": [
488
+ 29,
489
+ 45,
490
+ 0.644
491
+ ]
492
+ }
493
+ ],
494
+ "Scene Text-centric VQA": [
495
+ 19,
496
+ 40,
497
+ 0.475,
498
+ {
499
+ "Default": [
500
+ 19,
501
+ 40,
502
+ 0.475
503
+ ]
504
+ }
505
+ ],
506
+ "Artistic Text Recognition": [
507
+ 0,
508
+ 11,
509
+ 0.0,
510
+ {
511
+ "Default": [
512
+ 0,
513
+ 11,
514
+ 0.0
515
+ ]
516
+ }
517
+ ],
518
+ "IrRegular Text Recognition": [
519
+ 2,
520
+ 11,
521
+ 0.182,
522
+ {
523
+ "Default": [
524
+ 2,
525
+ 11,
526
+ 0.182
527
+ ]
528
+ }
529
+ ],
530
+ "Non-semantic Text Recognition": [
531
+ 4,
532
+ 12,
533
+ 0.333,
534
+ {
535
+ "Default": [
536
+ 4,
537
+ 12,
538
+ 0.333
539
+ ]
540
+ }
541
+ ],
542
+ "Regular Text Recognition": [
543
+ 6,
544
+ 11,
545
+ 0.545,
546
+ {
547
+ "Default": [
548
+ 6,
549
+ 11,
550
+ 0.545
551
+ ]
552
+ }
553
+ ],
554
+ "Handwriting_CN": [
555
+ 6,
556
+ 20,
557
+ 0.3,
558
+ {
559
+ "Default": [
560
+ 6,
561
+ 20,
562
+ 0.3
563
+ ]
564
+ }
565
+ ],
566
+ "Chinese Unlimited": [
567
+ 16,
568
+ 44,
569
+ 0.364,
570
+ {
571
+ "Default": [
572
+ 16,
573
+ 44,
574
+ 0.364
575
+ ]
576
+ }
577
+ ],
578
+ "acc_stderr": 0,
579
+ "acc": 41.453
580
+ },
581
+ "CharXiv": {
582
+ "descriptive": {
583
+ "Overall Score": 64.35,
584
+ "By Question": {
585
+ "Q1": 57.79,
586
+ "Q2": 74.78,
587
+ "Q3": 63.95,
588
+ "Q4": 75.1,
589
+ "Q5": 69.46,
590
+ "Q6": 60.64,
591
+ "Q7": 47.86,
592
+ "Q8": 68.3,
593
+ "Q9": 65.17,
594
+ "Q10": 60.96,
595
+ "Q11": 57.71,
596
+ "Q12": 57.69,
597
+ "Q13": 64.84,
598
+ "Q14": 63.12,
599
+ "Q15": 67.09,
600
+ "Q16": 61.11,
601
+ "Q17": 39.73,
602
+ "Q18": 86.64,
603
+ "Q19": 86.15
604
+ },
605
+ "By Category": {
606
+ "Information Extraction": 64.29,
607
+ "Enumeration": 65.7,
608
+ "Pattern Recognition": 73.58,
609
+ "Counting": 63.61,
610
+ "Compositionality": 39.73
611
+ },
612
+ "By Subplot": {
613
+ "1 Subplot": 73.38,
614
+ "2-4 Subplots": 63.82,
615
+ "5+ Subplots": 50.42
616
+ },
617
+ "By Subject": {
618
+ "Computer Science": 65.48,
619
+ "Economics": 65.76,
620
+ "Electrical Engineering and Systems Science": 70.38,
621
+ "Mathematics": 65.93,
622
+ "Physics": 58.86,
623
+ "Quantitative Biology": 55.95,
624
+ "Quantitative Finance": 66.38,
625
+ "Statistics": 66.59
626
+ },
627
+ "By Year": {
628
+ "2020": 66.5,
629
+ "2021": 61.3,
630
+ "2022": 64.45,
631
+ "2023": 65.32
632
+ },
633
+ "N_valid": 4000,
634
+ "N_invalid": 0,
635
+ "Question Type": "Descriptive"
636
+ },
637
+ "reasoning": {
638
+ "Overall Score": 28.8,
639
+ "By Answer Type": {
640
+ "Text-in-Chart": 33.18,
641
+ "Text-in-General": 33.33,
642
+ "Number-in-Chart": 27.16,
643
+ "Number-in-General": 20.09
644
+ },
645
+ "By Source": {
646
+ "GPT-Sourced": 33.15,
647
+ "GPT-Inspired": 25.46,
648
+ "Completely Human": 28.67
649
+ },
650
+ "By Subject": {
651
+ "Computer Science": 30.16,
652
+ "Economics": 27.54,
653
+ "Electrical Engineering and Systems Science": 27.73,
654
+ "Mathematics": 34.07,
655
+ "Physics": 29.92,
656
+ "Quantitative Biology": 31.75,
657
+ "Quantitative Finance": 20.69,
658
+ "Statistics": 27.43
659
+ },
660
+ "By Year": {
661
+ "2020": 21.46,
662
+ "2021": 28.74,
663
+ "2022": 32.38,
664
+ "2023": 32.66
665
+ },
666
+ "By Subplot": {
667
+ "1 Subplot": 30.57,
668
+ "2-4 Subplots": 29.37,
669
+ "5+ Subplots": 25.0
670
+ },
671
+ "N_valid": 1000,
672
+ "N_invalid": 0,
673
+ "Question Type": "Reasoning"
674
+ },
675
+ "accuracy": 46.57,
676
+ "acc_stderr": 0,
677
+ "acc": 46.57
678
+ },
679
+ "MathVision": {
680
+ "accuracy": 33.22,
681
+ "acc_stderr": 0,
682
+ "acc": 33.22
683
+ },
684
+ "CII-Bench": {
685
+ "accuracy": 50.85,
686
+ "domain_score": {
687
+ "Art": 52.21,
688
+ "Env.": 55.56,
689
+ "Society": 50.27,
690
+ "CTC": 45.19,
691
+ "Life": 51.52,
692
+ "Politics": 62.5
693
+ },
694
+ "emotion_score": {
695
+ "Negative": 55.09,
696
+ "Positive": 47.44,
697
+ "Neutral": 49.62
698
+ },
699
+ "acc_stderr": 0,
700
+ "acc": 50.85
701
+ },
702
+ "Blink": {
703
+ "accuracy": 54.29,
704
+ "Art Style": 70.94,
705
+ "Counting": 55.83,
706
+ "Forensic Detection": 37.12,
707
+ "Functional Correspondence": 30.0,
708
+ "IQ Test": 32.67,
709
+ "Jigsaw": 75.33,
710
+ "Multi-view Reasoning": 51.88,
711
+ "Object Localization": 54.1,
712
+ "Relative Depth": 66.94,
713
+ "Relative Reflectance": 34.33,
714
+ "Semantic Correspondence": 35.25,
715
+ "Spatial Relation": 75.52,
716
+ "Visual Correspondence": 57.56,
717
+ "Visual Similarity": 82.96,
718
+ "acc_stderr": 0,
719
+ "acc": 54.29
720
+ }
721
+ }
722
+ }
Idefics3-8B-Llama3/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 27.86
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 13.53,
25
  "acc_stderr": 0,
26
- "acc": 13.53
27
  },
28
  "MmvetV2": {
29
  "accuracy": 36.6344,
@@ -335,9 +367,9 @@
335
  "acc": 34.62
336
  },
337
  "MathVision": {
338
- "accuracy": 16.25,
339
  "acc_stderr": 0,
340
- "acc": 16.25
341
  },
342
  "CII-Bench": {
343
  "accuracy": 39.22,
 
21
  "acc": 27.86
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 8.55,
25
+ "subject_score": {
26
+ "History": 12.5,
27
+ "Art": 7.55,
28
+ "Design": 6.67,
29
+ "Literature": 17.31,
30
+ "Agriculture": 3.33,
31
+ "Finance": 5.0,
32
+ "Sociology": 7.41,
33
+ "Accounting": 5.17,
34
+ "Energy_and_Power": 8.62,
35
+ "Pharmacy": 12.28,
36
+ "Architecture_and_Engineering": 13.33,
37
+ "Clinical_Medicine": 8.47,
38
+ "Public_Health": 1.72,
39
+ "Physics": 20.0,
40
+ "Art_Theory": 1.82,
41
+ "Electronics": 8.33,
42
+ "Psychology": 5.0,
43
+ "Biology": 10.17,
44
+ "Manage": 16.0,
45
+ "Economics": 5.08,
46
+ "Mechanical_Engineering": 16.95,
47
+ "Diagnostics_and_Laboratory_Medicine": 5.0,
48
+ "Basic_Medical_Science": 9.62,
49
+ "Computer_Science": 6.67,
50
+ "Math": 10.0,
51
+ "Music": 6.67,
52
+ "Materials": 10.0,
53
+ "Marketing": 6.78,
54
+ "Chemistry": 6.67,
55
+ "Geography": 3.85
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 8.55
59
  },
60
  "MmvetV2": {
61
  "accuracy": 36.6344,
 
367
  "acc": 34.62
368
  },
369
  "MathVision": {
370
+ "accuracy": 16.22,
371
  "acc_stderr": 0,
372
+ "acc": 16.22
373
  },
374
  "CII-Bench": {
375
  "accuracy": 39.22,
InternVL2-2B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -56,9 +56,41 @@
56
  "acc": 20.4
57
  },
58
  "MMMU_Pro_vision": {
59
- "accuracy": 10.81,
60
  "acc_stderr": 0,
61
- "acc": 10.81
62
  },
63
  "MmvetV2": {
64
  "accuracy": 39.4584,
@@ -368,9 +400,9 @@
368
  "acc": 28.35
369
  },
370
  "MathVision": {
371
- "accuracy": 14.01,
372
  "acc_stderr": 0,
373
- "acc": 14.01
374
  },
375
  "CII-Bench": {
376
  "accuracy": 38.95,
 
56
  "acc": 20.4
57
  },
58
  "MMMU_Pro_vision": {
59
+ "accuracy": 12.2,
60
+ "subject_score": {
61
+ "Finance": 6.67,
62
+ "Agriculture": 11.67,
63
+ "Design": 16.67,
64
+ "Art": 9.43,
65
+ "Literature": 19.23,
66
+ "Sociology": 14.81,
67
+ "History": 8.93,
68
+ "Pharmacy": 22.81,
69
+ "Clinical_Medicine": 8.47,
70
+ "Energy_and_Power": 5.17,
71
+ "Accounting": 12.07,
72
+ "Public_Health": 5.17,
73
+ "Architecture_and_Engineering": 11.67,
74
+ "Psychology": 10.0,
75
+ "Art_Theory": 7.27,
76
+ "Electronics": 8.33,
77
+ "Physics": 11.67,
78
+ "Biology": 16.95,
79
+ "Mechanical_Engineering": 18.64,
80
+ "Manage": 20.0,
81
+ "Economics": 8.47,
82
+ "Basic_Medical_Science": 23.08,
83
+ "Diagnostics_and_Laboratory_Medicine": 13.33,
84
+ "Computer_Science": 11.67,
85
+ "Math": 8.33,
86
+ "Music": 11.67,
87
+ "Marketing": 6.78,
88
+ "Materials": 16.67,
89
+ "Chemistry": 10.0,
90
+ "Geography": 13.46
91
+ },
92
  "acc_stderr": 0,
93
+ "acc": 12.2
94
  },
95
  "MmvetV2": {
96
  "accuracy": 39.4584,
 
400
  "acc": 28.35
401
  },
402
  "MathVision": {
403
+ "accuracy": 13.88,
404
  "acc_stderr": 0,
405
+ "acc": 13.88
406
  },
407
  "CII-Bench": {
408
  "accuracy": 38.95,
InternVL2-8B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 31.27
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 35.55,
25
  "acc_stderr": 0,
26
- "acc": 35.55
27
  },
28
  "MmvetV2": {
29
  "accuracy": 52.0116,
@@ -332,9 +364,9 @@
332
  "acc": 37.61
333
  },
334
  "MathVision": {
335
- "accuracy": 19.77,
336
  "acc_stderr": 0,
337
- "acc": 19.77
338
  },
339
  "CII-Bench": {
340
  "accuracy": 51.9,
 
21
  "acc": 31.27
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 18.84,
25
+ "subject_score": {
26
+ "History": 12.5,
27
+ "Art": 20.75,
28
+ "Design": 25.0,
29
+ "Literature": 48.08,
30
+ "Agriculture": 18.33,
31
+ "Finance": 23.33,
32
+ "Sociology": 25.93,
33
+ "Accounting": 17.24,
34
+ "Energy_and_Power": 15.52,
35
+ "Pharmacy": 31.58,
36
+ "Architecture_and_Engineering": 18.33,
37
+ "Clinical_Medicine": 3.39,
38
+ "Public_Health": 12.07,
39
+ "Physics": 15.0,
40
+ "Art_Theory": 9.09,
41
+ "Electronics": 20.0,
42
+ "Psychology": 16.67,
43
+ "Biology": 16.95,
44
+ "Manage": 18.0,
45
+ "Economics": 25.42,
46
+ "Mechanical_Engineering": 22.03,
47
+ "Diagnostics_and_Laboratory_Medicine": 21.67,
48
+ "Basic_Medical_Science": 15.38,
49
+ "Computer_Science": 13.33,
50
+ "Math": 15.0,
51
+ "Music": 16.67,
52
+ "Materials": 8.33,
53
+ "Marketing": 25.42,
54
+ "Chemistry": 20.0,
55
+ "Geography": 17.31
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 18.84
59
  },
60
  "MmvetV2": {
61
  "accuracy": 52.0116,
 
364
  "acc": 37.61
365
  },
366
  "MathVision": {
367
+ "accuracy": 19.34,
368
  "acc_stderr": 0,
369
+ "acc": 19.34
370
  },
371
  "CII-Bench": {
372
  "accuracy": 51.9,
InternVL2-Llama3-76B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 36.3
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 13.82,
25
  "acc_stderr": 0,
26
- "acc": 13.82
27
  },
28
  "MmvetV2": {
29
  "accuracy": 62.3404,
@@ -334,9 +366,9 @@
334
  "acc": 52.09
335
  },
336
  "MathVision": {
337
- "accuracy": 17.07,
338
  "acc_stderr": 0,
339
- "acc": 17.07
340
  },
341
  "CII-Bench": {
342
  "accuracy": 53.99,
 
21
  "acc": 36.3
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 23.58,
25
+ "subject_score": {
26
+ "Design": 28.33,
27
+ "History": 23.21,
28
+ "Literature": 57.69,
29
+ "Art": 30.19,
30
+ "Agriculture": 11.67,
31
+ "Finance": 15.0,
32
+ "Sociology": 38.89,
33
+ "Public_Health": 25.86,
34
+ "Clinical_Medicine": 13.56,
35
+ "Pharmacy": 28.07,
36
+ "Accounting": 27.59,
37
+ "Architecture_and_Engineering": 8.33,
38
+ "Physics": 18.33,
39
+ "Art_Theory": 40.0,
40
+ "Psychology": 16.67,
41
+ "Biology": 27.12,
42
+ "Economics": 15.25,
43
+ "Manage": 28.0,
44
+ "Mechanical_Engineering": 27.12,
45
+ "Basic_Medical_Science": 28.85,
46
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
47
+ "Electronics": 21.67,
48
+ "Computer_Science": 26.67,
49
+ "Math": 20.0,
50
+ "Music": 21.67,
51
+ "Marketing": 25.42,
52
+ "Energy_and_Power": 15.52,
53
+ "Chemistry": 18.33,
54
+ "Geography": 21.15,
55
+ "Materials": 8.33
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 23.58
59
  },
60
  "MmvetV2": {
61
  "accuracy": 62.3404,
 
366
  "acc": 52.09
367
  },
368
  "MathVision": {
369
+ "accuracy": 16.18,
370
  "acc_stderr": 0,
371
+ "acc": 16.18
372
  },
373
  "CII-Bench": {
374
  "accuracy": 53.99,
InternVL2_5-26B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,518 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "InternVL2_5-26B",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 60,
12
+ "accuracy": 68.18
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 389,
17
+ "accuracy": 43.22
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 37,
22
+ "accuracy": 29.37
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 73,
27
+ "accuracy": 35.78
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 78,
32
+ "accuracy": 50.98
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 53,
37
+ "accuracy": 62.35
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 88,
42
+ "accuracy": 36.07
43
+ },
44
+ "accuracy": 43.22,
45
+ "acc_stderr": 0,
46
+ "acc": 43.22
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 52.44,
50
+ "subject_score": {
51
+ "Accounting": 43.33,
52
+ "Agriculture": 53.33,
53
+ "Architecture": 30.0,
54
+ "Art": 78.33,
55
+ "Basic": 60.0,
56
+ "Biology": 60.0,
57
+ "Chemistry": 30.0,
58
+ "Clinical": 63.33,
59
+ "Computer": 43.33,
60
+ "Design": 83.33,
61
+ "Diagnostics": 53.33,
62
+ "Economics": 46.67,
63
+ "Electronics": 30.0,
64
+ "Energy": 40.0,
65
+ "Finance": 36.67,
66
+ "Geography": 53.33,
67
+ "History": 70.0,
68
+ "Literature": 90.0,
69
+ "Manage": 50.0,
70
+ "Marketing": 53.33,
71
+ "Materials": 43.33,
72
+ "Math": 36.67,
73
+ "Mechanical": 30.0,
74
+ "Music": 26.67,
75
+ "Pharmacy": 50.0,
76
+ "Physics": 43.33,
77
+ "Psychology": 60.0,
78
+ "Public": 73.33,
79
+ "Sociology": 63.33
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 50.47,
83
+ "Easy": 65.76,
84
+ "Hard": 35.36
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 52.44
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 37.46,
91
+ "subject_score": {
92
+ "History": 60.71,
93
+ "Art": 66.04,
94
+ "Design": 73.33,
95
+ "Literature": 73.08,
96
+ "Agriculture": 26.67,
97
+ "Finance": 28.33,
98
+ "Sociology": 46.3,
99
+ "Accounting": 24.14,
100
+ "Energy_and_Power": 15.52,
101
+ "Pharmacy": 47.37,
102
+ "Architecture_and_Engineering": 13.33,
103
+ "Clinical_Medicine": 33.9,
104
+ "Public_Health": 34.48,
105
+ "Physics": 20.0,
106
+ "Art_Theory": 72.73,
107
+ "Electronics": 33.33,
108
+ "Psychology": 40.0,
109
+ "Biology": 37.29,
110
+ "Manage": 28.0,
111
+ "Economics": 25.42,
112
+ "Mechanical_Engineering": 30.51,
113
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
114
+ "Basic_Medical_Science": 38.46,
115
+ "Computer_Science": 38.33,
116
+ "Math": 36.67,
117
+ "Music": 26.67,
118
+ "Materials": 16.67,
119
+ "Marketing": 38.98,
120
+ "Chemistry": 33.33,
121
+ "Geography": 48.08
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 35.71,
125
+ "Easy": 48.67,
126
+ "Hard": 26.18
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 37.46
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 23.7,
133
+ "subject_score": {
134
+ "History": 30.36,
135
+ "Art": 22.64,
136
+ "Design": 25.0,
137
+ "Literature": 55.77,
138
+ "Agriculture": 25.0,
139
+ "Finance": 25.0,
140
+ "Sociology": 35.19,
141
+ "Accounting": 18.97,
142
+ "Energy_and_Power": 17.24,
143
+ "Pharmacy": 26.32,
144
+ "Architecture_and_Engineering": 16.67,
145
+ "Clinical_Medicine": 15.25,
146
+ "Public_Health": 18.97,
147
+ "Physics": 18.33,
148
+ "Art_Theory": 27.27,
149
+ "Electronics": 31.67,
150
+ "Psychology": 23.33,
151
+ "Biology": 15.25,
152
+ "Manage": 24.0,
153
+ "Economics": 22.03,
154
+ "Mechanical_Engineering": 22.03,
155
+ "Diagnostics_and_Laboratory_Medicine": 23.33,
156
+ "Basic_Medical_Science": 23.08,
157
+ "Computer_Science": 31.67,
158
+ "Math": 15.0,
159
+ "Music": 16.67,
160
+ "Materials": 18.33,
161
+ "Marketing": 23.73,
162
+ "Chemistry": 20.0,
163
+ "Geography": 28.85
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 23.7
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 61.7602,
170
+ "capability_scores": {
171
+ "ocr": 68.5096153846154,
172
+ "math": 59.11764705882353,
173
+ "spat": 57.86802030456848,
174
+ "rec": 58.83495145631078,
175
+ "know": 54.16666666666665,
176
+ "gen": 59.67272727272731,
177
+ "seq": 55.71428571428573
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 80.9090909090909,
181
+ "ocr_math_spat": 55.333333333333336,
182
+ "ocr_math_rec_spat": 50.0,
183
+ "rec_spat": 59.285714285714285,
184
+ "ocr_spat": 75.0,
185
+ "ocr_rec_spat": 41.66666666666667,
186
+ "ocr_spat_know": 87.5,
187
+ "ocr_rec": 87.5,
188
+ "rec_spat_know": 39.0,
189
+ "ocr": 84.0625,
190
+ "rec": 68.30508474576268,
191
+ "rec_know": 41.53846153846153,
192
+ "gen_rec_know": 54.49999999999998,
193
+ "ocr_gen_rec_know": 71.53846153846153,
194
+ "ocr_gen_rec_spat": 66.97674418604652,
195
+ "ocr_gen_spat": 80.0,
196
+ "seq_math_spat_ocr_gen": 40.0,
197
+ "rec_seq_math_spat_ocr": 0.0,
198
+ "gen_rec_spat": 46.36363636363636,
199
+ "ocr_gen_math_spat": 0.0,
200
+ "rec_spat_seq": 34.285714285714285,
201
+ "ocr_rec_spat_seq": 63.33333333333333,
202
+ "gen_rec_spat_know": 53.333333333333336,
203
+ "gen_rec": 68.82352941176472,
204
+ "ocr_rec_spat_know": 50.0,
205
+ "rec_know_spat_ocr_gen": 65.0,
206
+ "ocr_math_rec": 100.0,
207
+ "ocr_gen_rec": 66.0,
208
+ "ocr_gen_rec_seq": 78.57142857142857,
209
+ "ocr_gen": 60.0,
210
+ "gen_rec_seq": 56.42857142857143,
211
+ "rec_seq": 60.0,
212
+ "gen_spat_rec_seq": 71.25,
213
+ "rec_seq_know": 0.0,
214
+ "gen_rec_seq_know": 40.0,
215
+ "gen_rec_spat_seq": 71.25,
216
+ "rec_seq_spat_ocr_gen": 10.0,
217
+ "rec_seq_know_ocr_gen": 85.0,
218
+ "math_rec_know": 25.0,
219
+ "ocr_rec_seq": 100.0
220
+ },
221
+ "acc_stderr": 0,
222
+ "acc": 61.7602
223
+ },
224
+ "MathVerse": {
225
+ "Text Dominant": {
226
+ "accuracy": 44.54,
227
+ "correct": 351,
228
+ "total": 788
229
+ },
230
+ "Total": {
231
+ "accuracy": 37.23,
232
+ "correct": 1467,
233
+ "total": 3940
234
+ },
235
+ "Text Lite": {
236
+ "accuracy": 40.36,
237
+ "correct": 318,
238
+ "total": 788
239
+ },
240
+ "Vision Intensive": {
241
+ "accuracy": 38.83,
242
+ "correct": 306,
243
+ "total": 788
244
+ },
245
+ "Vision Dominant": {
246
+ "accuracy": 38.58,
247
+ "correct": 304,
248
+ "total": 788
249
+ },
250
+ "Vision Only": {
251
+ "accuracy": 23.86,
252
+ "correct": 188,
253
+ "total": 788
254
+ },
255
+ "accuracy": 37.23,
256
+ "acc_stderr": 0,
257
+ "acc": 37.23
258
+ },
259
+ "Ocrlite": {
260
+ "final_score": [
261
+ 1230,
262
+ 1645
263
+ ],
264
+ "accuracy": 74.772,
265
+ "Key Information Extraction-Bookshelf": [
266
+ 30,
267
+ 52
268
+ ],
269
+ "Scene Text-centric VQA-diet_constraints": [
270
+ 59,
271
+ 90
272
+ ],
273
+ "Doc-oriented VQA-Control": [
274
+ 139,
275
+ 189
276
+ ],
277
+ "Doc-oriented VQA": [
278
+ 145,
279
+ 204
280
+ ],
281
+ "Scene Text-centric VQA-Fake_logo": [
282
+ 45,
283
+ 119
284
+ ],
285
+ "Handwritten Mathematical Expression Recognition": [
286
+ 49,
287
+ 100
288
+ ],
289
+ "Key Information Extraction": [
290
+ 190,
291
+ 209
292
+ ],
293
+ "Scene Text-centric VQA-Control": [
294
+ 166,
295
+ 200
296
+ ],
297
+ "Scene Text-centric VQA": [
298
+ 231,
299
+ 282
300
+ ],
301
+ "Artistic Text Recognition": [
302
+ 40,
303
+ 50
304
+ ],
305
+ "Irregular Text Recognition": [
306
+ 45,
307
+ 50
308
+ ],
309
+ "Non-Semantic Text Recognition": [
310
+ 42,
311
+ 50
312
+ ],
313
+ "Regular Text Recognition": [
314
+ 49,
315
+ 50
316
+ ],
317
+ "acc_stderr": 0,
318
+ "acc": 74.772
319
+ },
320
+ "OcrliteZh": {
321
+ "final_score": [
322
+ 140,
323
+ 234
324
+ ],
325
+ "accuracy": 59.829,
326
+ "Docvqa": [
327
+ 7,
328
+ 10
329
+ ],
330
+ "Chartqa-human": [
331
+ 4,
332
+ 10
333
+ ],
334
+ "Chartqa-au": [
335
+ 4,
336
+ 10
337
+ ],
338
+ "infographic": [
339
+ 5,
340
+ 10
341
+ ],
342
+ "Key Information Extraction": [
343
+ 33,
344
+ 45
345
+ ],
346
+ "Scene Text-centric VQA": [
347
+ 26,
348
+ 40
349
+ ],
350
+ "Artistic Text Recognition": [
351
+ 6,
352
+ 11
353
+ ],
354
+ "IrRegular Text Recognition": [
355
+ 6,
356
+ 11
357
+ ],
358
+ "Non-semantic Text Recognition": [
359
+ 10,
360
+ 12
361
+ ],
362
+ "Regular Text Recognition": [
363
+ 9,
364
+ 11
365
+ ],
366
+ "Handwriting_CN": [
367
+ 13,
368
+ 20
369
+ ],
370
+ "Chinese Unlimited": [
371
+ 17,
372
+ 44
373
+ ],
374
+ "acc_stderr": 0,
375
+ "acc": 59.829
376
+ },
377
+ "CharXiv": {
378
+ "descriptive": {
379
+ "Overall Score": 73.0,
380
+ "By Question": {
381
+ "Q1": 80.33,
382
+ "Q2": 77.39,
383
+ "Q3": 67.81,
384
+ "Q4": 86.77,
385
+ "Q5": 83.68,
386
+ "Q6": 69.08,
387
+ "Q7": 71.37,
388
+ "Q8": 75.0,
389
+ "Q9": 64.68,
390
+ "Q10": 74.66,
391
+ "Q11": 53.71,
392
+ "Q12": 68.13,
393
+ "Q13": 68.04,
394
+ "Q14": 89.36,
395
+ "Q15": 93.29,
396
+ "Q16": 61.11,
397
+ "Q17": 24.11,
398
+ "Q18": 71.26,
399
+ "Q19": 86.15
400
+ },
401
+ "By Category": {
402
+ "Information Extraction": 76.75,
403
+ "Enumeration": 79.98,
404
+ "Pattern Recognition": 63.76,
405
+ "Counting": 73.54,
406
+ "Compositionality": 24.11
407
+ },
408
+ "By Subplot": {
409
+ "1 Subplot": 79.6,
410
+ "2-4 Subplots": 73.35,
411
+ "5+ Subplots": 61.65
412
+ },
413
+ "By Subject": {
414
+ "Computer Science": 71.43,
415
+ "Economics": 73.19,
416
+ "Electrical Engineering and Systems Science": 78.57,
417
+ "Mathematics": 75.93,
418
+ "Physics": 66.73,
419
+ "Quantitative Biology": 68.65,
420
+ "Quantitative Finance": 75.0,
421
+ "Statistics": 75.0
422
+ },
423
+ "By Year": {
424
+ "2020": 72.17,
425
+ "2021": 71.55,
426
+ "2022": 74.28,
427
+ "2023": 74.09
428
+ },
429
+ "N_valid": 4000,
430
+ "N_invalid": 0,
431
+ "Question Type": "Descriptive"
432
+ },
433
+ "reasoning": {
434
+ "Overall Score": 31.9,
435
+ "By Answer Type": {
436
+ "Text-in-Chart": 33.64,
437
+ "Text-in-General": 38.38,
438
+ "Number-in-Chart": 39.22,
439
+ "Number-in-General": 18.34
440
+ },
441
+ "By Source": {
442
+ "GPT-Sourced": 36.41,
443
+ "GPT-Inspired": 31.02,
444
+ "Completely Human": 30.83
445
+ },
446
+ "By Subject": {
447
+ "Computer Science": 28.57,
448
+ "Economics": 36.96,
449
+ "Electrical Engineering and Systems Science": 34.45,
450
+ "Mathematics": 32.59,
451
+ "Physics": 35.43,
452
+ "Quantitative Biology": 30.16,
453
+ "Quantitative Finance": 29.31,
454
+ "Statistics": 26.55
455
+ },
456
+ "By Year": {
457
+ "2020": 32.79,
458
+ "2021": 36.02,
459
+ "2022": 27.87,
460
+ "2023": 30.65
461
+ },
462
+ "By Subplot": {
463
+ "1 Subplot": 36.27,
464
+ "2-4 Subplots": 30.69,
465
+ "5+ Subplots": 26.69
466
+ },
467
+ "N_valid": 1000,
468
+ "N_invalid": 0,
469
+ "Question Type": "Reasoning"
470
+ },
471
+ "accuracy": 52.45,
472
+ "acc_stderr": 0,
473
+ "acc": 52.45
474
+ },
475
+ "MathVision": {
476
+ "accuracy": 19.21,
477
+ "acc_stderr": 0,
478
+ "acc": 19.21
479
+ },
480
+ "CII-Bench": {
481
+ "accuracy": 61.83,
482
+ "domain_score": {
483
+ "Life": 58.01,
484
+ "Art": 61.03,
485
+ "CTC": 53.33,
486
+ "Society": 67.57,
487
+ "Env.": 79.63,
488
+ "Politics": 66.67
489
+ },
490
+ "emotion_score": {
491
+ "Neutral": 62.41,
492
+ "Negative": 63.02,
493
+ "Positive": 59.83
494
+ },
495
+ "acc_stderr": 0,
496
+ "acc": 61.83
497
+ },
498
+ "Blink": {
499
+ "accuracy": 54.6,
500
+ "Art Style": 79.49,
501
+ "Counting": 75.83,
502
+ "Forensic Detection": 31.82,
503
+ "Functional Correspondence": 30.77,
504
+ "IQ Test": 24.67,
505
+ "Jigsaw": 52.0,
506
+ "Multi-view Reasoning": 50.38,
507
+ "Object Localization": 54.1,
508
+ "Relative Depth": 64.52,
509
+ "Relative Reflectance": 32.09,
510
+ "Semantic Correspondence": 51.08,
511
+ "Spatial Relation": 79.72,
512
+ "Visual Correspondence": 62.79,
513
+ "Visual Similarity": 80.0,
514
+ "acc_stderr": 0,
515
+ "acc": 54.6
516
+ }
517
+ }
518
+ }
InternVL2_5-3B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,720 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "InternVL2_5-3B",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 45,
12
+ "accuracy": 51.14
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 317,
17
+ "accuracy": 35.22
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 33,
22
+ "accuracy": 26.19
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 59,
27
+ "accuracy": 28.92
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 62,
32
+ "accuracy": 40.52
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 34,
37
+ "accuracy": 40.0
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 84,
42
+ "accuracy": 34.43
43
+ },
44
+ "accuracy": 35.22,
45
+ "acc_stderr": 0,
46
+ "acc": 35.22
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 41.78,
50
+ "subject_score": {
51
+ "Accounting": 26.67,
52
+ "Agriculture": 30.0,
53
+ "Architecture": 26.67,
54
+ "Art": 63.33,
55
+ "Basic": 60.0,
56
+ "Biology": 40.0,
57
+ "Chemistry": 26.67,
58
+ "Clinical": 43.33,
59
+ "Computer": 46.67,
60
+ "Design": 73.33,
61
+ "Diagnostics": 36.67,
62
+ "Economics": 30.0,
63
+ "Electronics": 26.67,
64
+ "Energy": 33.33,
65
+ "Finance": 10.0,
66
+ "Geography": 36.67,
67
+ "History": 60.0,
68
+ "Literature": 76.67,
69
+ "Manage": 33.33,
70
+ "Marketing": 43.33,
71
+ "Materials": 40.0,
72
+ "Math": 43.33,
73
+ "Mechanical": 23.33,
74
+ "Music": 33.33,
75
+ "Pharmacy": 50.0,
76
+ "Physics": 26.67,
77
+ "Psychology": 46.67,
78
+ "Public": 53.33,
79
+ "Sociology": 50.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 39.39,
83
+ "Easy": 52.54,
84
+ "Hard": 29.83
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 41.78
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 25.55,
91
+ "subject_score": {
92
+ "History": 35.71,
93
+ "Art": 47.17,
94
+ "Design": 53.33,
95
+ "Literature": 61.54,
96
+ "Agriculture": 21.67,
97
+ "Finance": 11.67,
98
+ "Sociology": 35.19,
99
+ "Accounting": 12.07,
100
+ "Energy_and_Power": 10.34,
101
+ "Pharmacy": 29.82,
102
+ "Architecture_and_Engineering": 13.33,
103
+ "Clinical_Medicine": 23.73,
104
+ "Public_Health": 13.79,
105
+ "Physics": 16.67,
106
+ "Art_Theory": 52.73,
107
+ "Electronics": 26.67,
108
+ "Psychology": 16.67,
109
+ "Biology": 30.51,
110
+ "Manage": 26.0,
111
+ "Economics": 15.25,
112
+ "Mechanical_Engineering": 16.95,
113
+ "Diagnostics_and_Laboratory_Medicine": 21.67,
114
+ "Basic_Medical_Science": 34.62,
115
+ "Computer_Science": 15.0,
116
+ "Math": 23.33,
117
+ "Music": 23.33,
118
+ "Materials": 13.33,
119
+ "Marketing": 22.03,
120
+ "Chemistry": 25.0,
121
+ "Geography": 28.85
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 22.1,
125
+ "Easy": 35.8,
126
+ "Hard": 18.95
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 25.55
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 15.43,
133
+ "subject_score": {
134
+ "History": 10.71,
135
+ "Art": 11.32,
136
+ "Design": 13.33,
137
+ "Literature": 42.31,
138
+ "Agriculture": 13.33,
139
+ "Finance": 10.0,
140
+ "Sociology": 16.67,
141
+ "Accounting": 15.52,
142
+ "Energy_and_Power": 17.24,
143
+ "Pharmacy": 26.32,
144
+ "Architecture_and_Engineering": 16.67,
145
+ "Clinical_Medicine": 6.78,
146
+ "Public_Health": 12.07,
147
+ "Physics": 16.67,
148
+ "Art_Theory": 12.73,
149
+ "Electronics": 18.33,
150
+ "Psychology": 16.67,
151
+ "Biology": 11.86,
152
+ "Manage": 28.0,
153
+ "Economics": 10.17,
154
+ "Mechanical_Engineering": 16.95,
155
+ "Diagnostics_and_Laboratory_Medicine": 13.33,
156
+ "Basic_Medical_Science": 21.15,
157
+ "Computer_Science": 20.0,
158
+ "Math": 11.67,
159
+ "Music": 8.33,
160
+ "Materials": 8.33,
161
+ "Marketing": 13.56,
162
+ "Chemistry": 15.0,
163
+ "Geography": 13.46
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 15.43
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 50.0967,
170
+ "capability_scores": {
171
+ "math": 32.94117647058824,
172
+ "ocr": 57.59615384615381,
173
+ "spat": 45.38071065989844,
174
+ "rec": 47.3300970873787,
175
+ "know": 46.602564102564095,
176
+ "gen": 47.85454545454542,
177
+ "seq": 43.21428571428573
178
+ },
179
+ "capability_detail_scores": {
180
+ "math_ocr": 27.27272727272727,
181
+ "spat_math_ocr": 36.666666666666664,
182
+ "rec_math_spat_ocr": 45.0,
183
+ "rec_spat": 35.714285714285715,
184
+ "spat_ocr": 63.46153846153846,
185
+ "rec_spat_ocr": 41.66666666666667,
186
+ "know_spat_ocr": 87.5,
187
+ "rec_ocr": 75.0,
188
+ "rec_spat_know": 28.999999999999996,
189
+ "ocr": 84.375,
190
+ "rec": 60.338983050847474,
191
+ "rec_know": 34.61538461538461,
192
+ "rec_gen_know": 46.899999999999984,
193
+ "know_rec_gen_ocr": 63.84615384615384,
194
+ "rec_gen_spat_ocr": 49.53488372093025,
195
+ "spat_gen_ocr": 60.0,
196
+ "seq_ocr_gen_spat_math": 50.0,
197
+ "seq_ocr_rec_spat_math": 0.0,
198
+ "rec_gen_spat": 35.45454545454545,
199
+ "gen_spat_math_ocr": 30.0,
200
+ "rec_seq_spat": 21.428571428571427,
201
+ "rec_seq_spat_ocr": 86.66666666666667,
202
+ "rec_gen_spat_know": 26.666666666666668,
203
+ "rec_gen": 52.64705882352941,
204
+ "know_rec_ocr_spat": 45.0,
205
+ "rec_gen_ocr_know": 63.84615384615384,
206
+ "know_ocr_rec_gen_spat": 75.0,
207
+ "rec_math_ocr": 100.0,
208
+ "rec_gen_ocr": 42.00000000000001,
209
+ "rec_seq_gen_ocr": 57.14285714285715,
210
+ "gen_ocr": 50.0,
211
+ "rec_seq_gen": 29.999999999999993,
212
+ "rec_seq": 35.0,
213
+ "rec_seq_gen_spat": 50.0,
214
+ "rec_seq_know": 0.0,
215
+ "rec_seq_gen_know": 50.0,
216
+ "seq_ocr_rec_gen_spat": 60.0,
217
+ "seq_know_ocr_rec_gen": 75.0,
218
+ "rec_math_know": 0.0,
219
+ "rec_seq_ocr": 100.0,
220
+ "know_rec_spat_ocr": 45.0,
221
+ "know_rec_spat": 28.999999999999996
222
+ },
223
+ "acc_stderr": 0,
224
+ "acc": 50.0967
225
+ },
226
+ "MathVerse": {
227
+ "Text Dominant": {
228
+ "accuracy": 25.76,
229
+ "correct": 203,
230
+ "total": 788
231
+ },
232
+ "Total": {
233
+ "accuracy": 20.46,
234
+ "correct": 806,
235
+ "total": 3940
236
+ },
237
+ "Text Lite": {
238
+ "accuracy": 21.7,
239
+ "correct": 171,
240
+ "total": 788
241
+ },
242
+ "Vision Intensive": {
243
+ "accuracy": 18.53,
244
+ "correct": 146,
245
+ "total": 788
246
+ },
247
+ "Vision Dominant": {
248
+ "accuracy": 18.15,
249
+ "correct": 143,
250
+ "total": 788
251
+ },
252
+ "Vision Only": {
253
+ "accuracy": 18.15,
254
+ "correct": 143,
255
+ "total": 788
256
+ },
257
+ "accuracy": 20.46,
258
+ "acc_stderr": 0,
259
+ "acc": 20.46
260
+ },
261
+ "Ocrlite": {
262
+ "final_score": [
263
+ 997,
264
+ 1644
265
+ ],
266
+ "accuracy": 60.645,
267
+ "Key Information Extraction-Bookshelf": [
268
+ 12,
269
+ 51,
270
+ 0.235,
271
+ {
272
+ "Default": [
273
+ 12,
274
+ 51,
275
+ 0.235
276
+ ]
277
+ }
278
+ ],
279
+ "Scene Text-centric VQA-diet_constraints": [
280
+ 36,
281
+ 90,
282
+ 0.4,
283
+ {
284
+ "Default": [
285
+ 36,
286
+ 90,
287
+ 0.4
288
+ ]
289
+ }
290
+ ],
291
+ "Doc-oriented VQA-Control": [
292
+ 101,
293
+ 189,
294
+ 0.534,
295
+ {
296
+ "Default": [
297
+ 101,
298
+ 189,
299
+ 0.534
300
+ ]
301
+ }
302
+ ],
303
+ "Doc-oriented VQA": [
304
+ 115,
305
+ 204,
306
+ 0.564,
307
+ {
308
+ "Default": [
309
+ 115,
310
+ 204,
311
+ 0.564
312
+ ]
313
+ }
314
+ ],
315
+ "Scene Text-centric VQA-Fake_logo": [
316
+ 46,
317
+ 119,
318
+ 0.387,
319
+ {
320
+ "Default": [
321
+ 46,
322
+ 119,
323
+ 0.387
324
+ ]
325
+ }
326
+ ],
327
+ "Handwritten Mathematical Expression Recognition": [
328
+ 3,
329
+ 100,
330
+ 0.03,
331
+ {
332
+ "Default": [
333
+ 3,
334
+ 100,
335
+ 0.03
336
+ ]
337
+ }
338
+ ],
339
+ "Key Information Extraction": [
340
+ 167,
341
+ 209,
342
+ 0.799,
343
+ {
344
+ "Default": [
345
+ 167,
346
+ 209,
347
+ 0.799
348
+ ]
349
+ }
350
+ ],
351
+ "Scene Text-centric VQA-Control": [
352
+ 163,
353
+ 200,
354
+ 0.815,
355
+ {
356
+ "Default": [
357
+ 163,
358
+ 200,
359
+ 0.815
360
+ ]
361
+ }
362
+ ],
363
+ "Scene Text-centric VQA": [
364
+ 192,
365
+ 282,
366
+ 0.681,
367
+ {
368
+ "Default": [
369
+ 192,
370
+ 282,
371
+ 0.681
372
+ ]
373
+ }
374
+ ],
375
+ "Artistic Text Recognition": [
376
+ 34,
377
+ 50,
378
+ 0.68,
379
+ {
380
+ "Default": [
381
+ 34,
382
+ 50,
383
+ 0.68
384
+ ]
385
+ }
386
+ ],
387
+ "Irregular Text Recognition": [
388
+ 39,
389
+ 50,
390
+ 0.78,
391
+ {
392
+ "Default": [
393
+ 39,
394
+ 50,
395
+ 0.78
396
+ ]
397
+ }
398
+ ],
399
+ "Non-Semantic Text Recognition": [
400
+ 39,
401
+ 50,
402
+ 0.78,
403
+ {
404
+ "Default": [
405
+ 39,
406
+ 50,
407
+ 0.78
408
+ ]
409
+ }
410
+ ],
411
+ "Regular Text Recognition": [
412
+ 50,
413
+ 50,
414
+ 1.0,
415
+ {
416
+ "Default": [
417
+ 50,
418
+ 50,
419
+ 1.0
420
+ ]
421
+ }
422
+ ],
423
+ "acc_stderr": 0,
424
+ "acc": 60.645
425
+ },
426
+ "OcrliteZh": {
427
+ "final_score": [
428
+ 108,
429
+ 234
430
+ ],
431
+ "accuracy": 46.154,
432
+ "Docvqa": [
433
+ 3,
434
+ 10,
435
+ 0.3,
436
+ {
437
+ "Default": [
438
+ 3,
439
+ 10,
440
+ 0.3
441
+ ]
442
+ }
443
+ ],
444
+ "Chartqa-human": [
445
+ 5,
446
+ 10,
447
+ 0.5,
448
+ {
449
+ "Default": [
450
+ 5,
451
+ 10,
452
+ 0.5
453
+ ]
454
+ }
455
+ ],
456
+ "Chartqa-au": [
457
+ 5,
458
+ 10,
459
+ 0.5,
460
+ {
461
+ "Default": [
462
+ 5,
463
+ 10,
464
+ 0.5
465
+ ]
466
+ }
467
+ ],
468
+ "infographic": [
469
+ 3,
470
+ 10,
471
+ 0.3,
472
+ {
473
+ "Default": [
474
+ 3,
475
+ 10,
476
+ 0.3
477
+ ]
478
+ }
479
+ ],
480
+ "Key Information Extraction": [
481
+ 30,
482
+ 45,
483
+ 0.667,
484
+ {
485
+ "Default": [
486
+ 30,
487
+ 45,
488
+ 0.667
489
+ ]
490
+ }
491
+ ],
492
+ "Scene Text-centric VQA": [
493
+ 17,
494
+ 40,
495
+ 0.425,
496
+ {
497
+ "Default": [
498
+ 17,
499
+ 40,
500
+ 0.425
501
+ ]
502
+ }
503
+ ],
504
+ "Artistic Text Recognition": [
505
+ 2,
506
+ 11,
507
+ 0.182,
508
+ {
509
+ "Default": [
510
+ 2,
511
+ 11,
512
+ 0.182
513
+ ]
514
+ }
515
+ ],
516
+ "IrRegular Text Recognition": [
517
+ 4,
518
+ 11,
519
+ 0.364,
520
+ {
521
+ "Default": [
522
+ 4,
523
+ 11,
524
+ 0.364
525
+ ]
526
+ }
527
+ ],
528
+ "Non-semantic Text Recognition": [
529
+ 10,
530
+ 12,
531
+ 0.833,
532
+ {
533
+ "Default": [
534
+ 10,
535
+ 12,
536
+ 0.833
537
+ ]
538
+ }
539
+ ],
540
+ "Regular Text Recognition": [
541
+ 10,
542
+ 11,
543
+ 0.909,
544
+ {
545
+ "Default": [
546
+ 10,
547
+ 11,
548
+ 0.909
549
+ ]
550
+ }
551
+ ],
552
+ "Handwriting_CN": [
553
+ 12,
554
+ 20,
555
+ 0.6,
556
+ {
557
+ "Default": [
558
+ 12,
559
+ 20,
560
+ 0.6
561
+ ]
562
+ }
563
+ ],
564
+ "Chinese Unlimited": [
565
+ 7,
566
+ 44,
567
+ 0.159,
568
+ {
569
+ "Default": [
570
+ 7,
571
+ 44,
572
+ 0.159
573
+ ]
574
+ }
575
+ ],
576
+ "acc_stderr": 0,
577
+ "acc": 46.154
578
+ },
579
+ "CharXiv": {
580
+ "descriptive": {
581
+ "Overall Score": 50.73,
582
+ "By Question": {
583
+ "Q1": 45.9,
584
+ "Q2": 57.83,
585
+ "Q3": 42.92,
586
+ "Q4": 57.98,
587
+ "Q5": 54.81,
588
+ "Q6": 38.55,
589
+ "Q7": 51.71,
590
+ "Q8": 39.29,
591
+ "Q9": 27.86,
592
+ "Q10": 41.1,
593
+ "Q11": 57.71,
594
+ "Q12": 44.51,
595
+ "Q13": 48.4,
596
+ "Q14": 79.79,
597
+ "Q15": 85.62,
598
+ "Q16": 44.44,
599
+ "Q17": 9.38,
600
+ "Q18": 48.99,
601
+ "Q19": 67.69
602
+ },
603
+ "By Category": {
604
+ "Information Extraction": 49.94,
605
+ "Enumeration": 59.97,
606
+ "Pattern Recognition": 51.97,
607
+ "Counting": 47.07,
608
+ "Compositionality": 9.38
609
+ },
610
+ "By Subplot": {
611
+ "1 Subplot": 60.95,
612
+ "2-4 Subplots": 47.55,
613
+ "5+ Subplots": 39.09
614
+ },
615
+ "By Subject": {
616
+ "Computer Science": 51.59,
617
+ "Economics": 50.54,
618
+ "Electrical Engineering and Systems Science": 54.41,
619
+ "Mathematics": 52.59,
620
+ "Physics": 45.08,
621
+ "Quantitative Biology": 47.82,
622
+ "Quantitative Finance": 51.94,
623
+ "Statistics": 52.21
624
+ },
625
+ "By Year": {
626
+ "2020": 48.68,
627
+ "2021": 50.48,
628
+ "2022": 50.92,
629
+ "2023": 52.82
630
+ },
631
+ "N_valid": 4000,
632
+ "N_invalid": 0,
633
+ "Question Type": "Descriptive"
634
+ },
635
+ "reasoning": {
636
+ "Overall Score": 21.4,
637
+ "By Answer Type": {
638
+ "Text-in-Chart": 27.5,
639
+ "Text-in-General": 25.25,
640
+ "Number-in-Chart": 21.12,
641
+ "Number-in-General": 8.3
642
+ },
643
+ "By Source": {
644
+ "GPT-Sourced": 20.65,
645
+ "GPT-Inspired": 23.61,
646
+ "Completely Human": 20.83
647
+ },
648
+ "By Subject": {
649
+ "Computer Science": 17.46,
650
+ "Economics": 24.64,
651
+ "Electrical Engineering and Systems Science": 22.69,
652
+ "Mathematics": 20.0,
653
+ "Physics": 16.54,
654
+ "Quantitative Biology": 19.84,
655
+ "Quantitative Finance": 25.0,
656
+ "Statistics": 25.66
657
+ },
658
+ "By Year": {
659
+ "2020": 23.08,
660
+ "2021": 21.46,
661
+ "2022": 19.26,
662
+ "2023": 21.77
663
+ },
664
+ "By Subplot": {
665
+ "1 Subplot": 19.69,
666
+ "2-4 Subplots": 24.6,
667
+ "5+ Subplots": 19.07
668
+ },
669
+ "N_valid": 1000,
670
+ "N_invalid": 0,
671
+ "Question Type": "Reasoning"
672
+ },
673
+ "accuracy": 36.06,
674
+ "acc_stderr": 0,
675
+ "acc": 36.06
676
+ },
677
+ "MathVision": {
678
+ "accuracy": 12.93,
679
+ "acc_stderr": 0,
680
+ "acc": 12.93
681
+ },
682
+ "CII-Bench": {
683
+ "accuracy": 41.18,
684
+ "domain_score": {
685
+ "Life": 31.6,
686
+ "Art": 46.32,
687
+ "CTC": 37.78,
688
+ "Society": 44.32,
689
+ "Env.": 59.26,
690
+ "Politics": 58.33
691
+ },
692
+ "emotion_score": {
693
+ "Neutral": 46.62,
694
+ "Negative": 40.0,
695
+ "Positive": 36.32
696
+ },
697
+ "acc_stderr": 0,
698
+ "acc": 41.18
699
+ },
700
+ "Blink": {
701
+ "accuracy": 43.35,
702
+ "Art Style": 58.12,
703
+ "Counting": 55.83,
704
+ "Forensic Detection": 10.61,
705
+ "Functional Correspondence": 18.46,
706
+ "IQ Test": 31.33,
707
+ "Jigsaw": 47.33,
708
+ "Multi-view Reasoning": 51.13,
709
+ "Object Localization": 50.82,
710
+ "Relative Depth": 62.1,
711
+ "Relative Reflectance": 38.06,
712
+ "Semantic Correspondence": 25.9,
713
+ "Spatial Relation": 83.22,
714
+ "Visual Correspondence": 29.65,
715
+ "Visual Similarity": 51.11,
716
+ "acc_stderr": 0,
717
+ "acc": 43.35
718
+ }
719
+ }
720
+ }
InternVL2_5-8B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,519 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "InternVL2_5-8B",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 56,
12
+ "accuracy": 63.64
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 379,
17
+ "accuracy": 42.11
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 33,
22
+ "accuracy": 26.19
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 73,
27
+ "accuracy": 35.78
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 73,
32
+ "accuracy": 47.71
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 48,
37
+ "accuracy": 56.47
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 96,
42
+ "accuracy": 39.34
43
+ },
44
+ "accuracy": 42.11,
45
+ "acc_stderr": 0,
46
+ "acc": 42.11
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 53.56,
50
+ "subject_score": {
51
+ "Accounting": 46.67,
52
+ "Agriculture": 50.0,
53
+ "Architecture": 30.0,
54
+ "Art": 81.67,
55
+ "Basic": 53.33,
56
+ "Biology": 43.33,
57
+ "Chemistry": 26.67,
58
+ "Clinical": 60.0,
59
+ "Computer": 60.0,
60
+ "Design": 80.0,
61
+ "Diagnostics": 46.67,
62
+ "Economics": 43.33,
63
+ "Electronics": 43.33,
64
+ "Energy": 40.0,
65
+ "Finance": 36.67,
66
+ "Geography": 50.0,
67
+ "History": 66.67,
68
+ "Literature": 90.0,
69
+ "Manage": 53.33,
70
+ "Marketing": 60.0,
71
+ "Materials": 40.0,
72
+ "Math": 46.67,
73
+ "Mechanical": 43.33,
74
+ "Music": 46.67,
75
+ "Pharmacy": 60.0,
76
+ "Physics": 33.33,
77
+ "Psychology": 70.0,
78
+ "Public": 63.33,
79
+ "Sociology": 60.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 52.36,
83
+ "Easy": 61.69,
84
+ "Hard": 43.09
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 53.56
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 36.53,
91
+ "subject_score": {
92
+ "History": 42.86,
93
+ "Art": 58.49,
94
+ "Design": 60.0,
95
+ "Literature": 75.0,
96
+ "Agriculture": 25.0,
97
+ "Finance": 30.0,
98
+ "Sociology": 51.85,
99
+ "Accounting": 25.86,
100
+ "Energy_and_Power": 15.52,
101
+ "Pharmacy": 40.35,
102
+ "Architecture_and_Engineering": 16.67,
103
+ "Clinical_Medicine": 35.59,
104
+ "Public_Health": 27.59,
105
+ "Physics": 25.0,
106
+ "Art_Theory": 61.82,
107
+ "Electronics": 40.0,
108
+ "Psychology": 35.0,
109
+ "Biology": 33.9,
110
+ "Manage": 32.0,
111
+ "Economics": 16.95,
112
+ "Mechanical_Engineering": 23.73,
113
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
114
+ "Basic_Medical_Science": 44.23,
115
+ "Computer_Science": 43.33,
116
+ "Math": 33.33,
117
+ "Music": 33.33,
118
+ "Materials": 21.67,
119
+ "Marketing": 42.37,
120
+ "Chemistry": 38.33,
121
+ "Geography": 50.0
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 33.83,
125
+ "Easy": 48.11,
126
+ "Hard": 26.68
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 36.53
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 25.2,
133
+ "subject_score": {
134
+ "History": 28.57,
135
+ "Art": 26.42,
136
+ "Design": 31.67,
137
+ "Literature": 51.92,
138
+ "Agriculture": 25.0,
139
+ "Finance": 25.0,
140
+ "Sociology": 35.19,
141
+ "Accounting": 31.03,
142
+ "Energy_and_Power": 17.24,
143
+ "Pharmacy": 28.07,
144
+ "Architecture_and_Engineering": 16.67,
145
+ "Clinical_Medicine": 10.17,
146
+ "Public_Health": 17.24,
147
+ "Physics": 20.0,
148
+ "Art_Theory": 32.73,
149
+ "Electronics": 26.67,
150
+ "Psychology": 20.0,
151
+ "Biology": 18.64,
152
+ "Manage": 36.0,
153
+ "Economics": 18.64,
154
+ "Mechanical_Engineering": 33.9,
155
+ "Diagnostics_and_Laboratory_Medicine": 30.0,
156
+ "Basic_Medical_Science": 25.0,
157
+ "Computer_Science": 25.0,
158
+ "Math": 25.0,
159
+ "Music": 21.67,
160
+ "Materials": 15.0,
161
+ "Marketing": 27.12,
162
+ "Chemistry": 21.67,
163
+ "Geography": 21.15
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 25.2
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 57.1567,
170
+ "capability_scores": {
171
+ "ocr": 62.06730769230767,
172
+ "math": 60.29411764705882,
173
+ "spat": 53.75634517766494,
174
+ "rec": 54.12621359223305,
175
+ "know": 52.307692307692314,
176
+ "gen": 54.58181818181818,
177
+ "seq": 45.71428571428572
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 72.72727272727273,
181
+ "ocr_spat_math": 52.66666666666667,
182
+ "ocr_rec_spat_math": 40.0,
183
+ "rec_spat": 62.5,
184
+ "ocr_spat": 69.23076923076923,
185
+ "ocr_rec_spat": 20.833333333333336,
186
+ "know_ocr_spat": 100.0,
187
+ "ocr_rec": 87.5,
188
+ "know_rec_spat": 44.00000000000001,
189
+ "ocr": 75.0,
190
+ "rec": 64.23728813559322,
191
+ "know_rec": 50.0,
192
+ "know_rec_gen": 51.69999999999999,
193
+ "ocr_gen_rec_know": 60.76923076923077,
194
+ "ocr_rec_gen_spat": 61.16279069767442,
195
+ "ocr_gen_spat": 80.0,
196
+ "ocr_spat_math_seq_gen": 40.0,
197
+ "ocr_spat_math_seq_rec": 0.0,
198
+ "rec_gen_spat": 40.45454545454545,
199
+ "ocr_gen_spat_math": 40.0,
200
+ "rec_spat_seq": 34.285714285714285,
201
+ "ocr_rec_spat_seq": 53.333333333333336,
202
+ "know_rec_gen_spat": 13.333333333333334,
203
+ "rec_gen": 62.05882352941177,
204
+ "ocr_rec_know_spat": 25.0,
205
+ "know_rec_gen_ocr": 60.76923076923077,
206
+ "ocr_spat_know_rec_gen": 60.0,
207
+ "ocr_rec_math": 100.0,
208
+ "ocr_rec_gen": 64.0,
209
+ "ocr_rec_gen_seq": 70.0,
210
+ "ocr_gen": 63.07692307692307,
211
+ "rec_gen_seq": 34.285714285714285,
212
+ "rec_seq": 40.0,
213
+ "rec_gen_seq_spat": 65.0,
214
+ "know_rec_seq": 0.0,
215
+ "know_rec_gen_seq": 60.00000000000001,
216
+ "rec_gen_spat_seq": 65.0,
217
+ "ocr_spat_seq_rec_gen": 46.666666666666664,
218
+ "ocr_seq_know_rec_gen": 64.99999999999999,
219
+ "know_rec_math": 100.0,
220
+ "ocr_rec_seq": 0.0
221
+ },
222
+ "acc_stderr": 0,
223
+ "acc": 57.1567
224
+ },
225
+ "MathVerse": {
226
+ "Text Dominant": {
227
+ "accuracy": 38.83,
228
+ "correct": 306,
229
+ "total": 788
230
+ },
231
+ "Total": {
232
+ "accuracy": 32.89,
233
+ "correct": 1296,
234
+ "total": 3940
235
+ },
236
+ "Text Lite": {
237
+ "accuracy": 36.04,
238
+ "correct": 284,
239
+ "total": 788
240
+ },
241
+ "Vision Intensive": {
242
+ "accuracy": 35.03,
243
+ "correct": 276,
244
+ "total": 788
245
+ },
246
+ "Vision Dominant": {
247
+ "accuracy": 34.14,
248
+ "correct": 269,
249
+ "total": 788
250
+ },
251
+ "Vision Only": {
252
+ "accuracy": 20.43,
253
+ "correct": 161,
254
+ "total": 788
255
+ },
256
+ "accuracy": 32.89,
257
+ "acc_stderr": 0,
258
+ "acc": 32.89
259
+ },
260
+ "Ocrlite": {
261
+ "final_score": [
262
+ 1182,
263
+ 1645
264
+ ],
265
+ "accuracy": 71.854,
266
+ "Key Information Extraction-Bookshelf": [
267
+ 26,
268
+ 52
269
+ ],
270
+ "Scene Text-centric VQA-diet_constraints": [
271
+ 61,
272
+ 90
273
+ ],
274
+ "Doc-oriented VQA-Control": [
275
+ 139,
276
+ 189
277
+ ],
278
+ "Doc-oriented VQA": [
279
+ 139,
280
+ 204
281
+ ],
282
+ "Scene Text-centric VQA-Fake_logo": [
283
+ 37,
284
+ 119
285
+ ],
286
+ "Handwritten Mathematical Expression Recognition": [
287
+ 46,
288
+ 100
289
+ ],
290
+ "Key Information Extraction": [
291
+ 180,
292
+ 209
293
+ ],
294
+ "Scene Text-centric VQA-Control": [
295
+ 168,
296
+ 200
297
+ ],
298
+ "Scene Text-centric VQA": [
299
+ 222,
300
+ 282
301
+ ],
302
+ "Artistic Text Recognition": [
303
+ 33,
304
+ 50
305
+ ],
306
+ "Irregular Text Recognition": [
307
+ 41,
308
+ 50
309
+ ],
310
+ "Non-Semantic Text Recognition": [
311
+ 42,
312
+ 50
313
+ ],
314
+ "Regular Text Recognition": [
315
+ 48,
316
+ 50
317
+ ],
318
+ "acc_stderr": 0,
319
+ "acc": 71.854
320
+ },
321
+ "OcrliteZh": {
322
+ "final_score": [
323
+ 134,
324
+ 234
325
+ ],
326
+ "accuracy": 57.265,
327
+ "Docvqa": [
328
+ 6,
329
+ 10
330
+ ],
331
+ "Chartqa-human": [
332
+ 4,
333
+ 10
334
+ ],
335
+ "Chartqa-au": [
336
+ 4,
337
+ 10
338
+ ],
339
+ "infographic": [
340
+ 4,
341
+ 10
342
+ ],
343
+ "Key Information Extraction": [
344
+ 33,
345
+ 45
346
+ ],
347
+ "Scene Text-centric VQA": [
348
+ 26,
349
+ 40
350
+ ],
351
+ "Artistic Text Recognition": [
352
+ 5,
353
+ 11
354
+ ],
355
+ "IrRegular Text Recognition": [
356
+ 4,
357
+ 11
358
+ ],
359
+ "Non-semantic Text Recognition": [
360
+ 9,
361
+ 12
362
+ ],
363
+ "Regular Text Recognition": [
364
+ 9,
365
+ 11
366
+ ],
367
+ "Handwriting_CN": [
368
+ 13,
369
+ 20
370
+ ],
371
+ "Chinese Unlimited": [
372
+ 17,
373
+ 44
374
+ ],
375
+ "acc_stderr": 0,
376
+ "acc": 57.265
377
+ },
378
+ "CharXiv": {
379
+ "descriptive": {
380
+ "Overall Score": 68.62,
381
+ "By Question": {
382
+ "Q1": 77.05,
383
+ "Q2": 79.57,
384
+ "Q3": 59.66,
385
+ "Q4": 78.21,
386
+ "Q5": 73.22,
387
+ "Q6": 63.05,
388
+ "Q7": 70.09,
389
+ "Q8": 70.09,
390
+ "Q9": 65.17,
391
+ "Q10": 61.64,
392
+ "Q11": 40.57,
393
+ "Q12": 60.44,
394
+ "Q13": 66.21,
395
+ "Q14": 85.82,
396
+ "Q15": 89.14,
397
+ "Q16": 61.11,
398
+ "Q17": 32.59,
399
+ "Q18": 67.61,
400
+ "Q19": 78.46
401
+ },
402
+ "By Category": {
403
+ "Information Extraction": 71.59,
404
+ "Enumeration": 77.0,
405
+ "Pattern Recognition": 56.77,
406
+ "Counting": 63.87,
407
+ "Compositionality": 32.59
408
+ },
409
+ "By Subplot": {
410
+ "1 Subplot": 77.4,
411
+ "2-4 Subplots": 66.2,
412
+ "5+ Subplots": 58.16
413
+ },
414
+ "By Subject": {
415
+ "Computer Science": 69.05,
416
+ "Economics": 68.3,
417
+ "Electrical Engineering and Systems Science": 74.58,
418
+ "Mathematics": 67.78,
419
+ "Physics": 64.96,
420
+ "Quantitative Biology": 66.07,
421
+ "Quantitative Finance": 67.24,
422
+ "Statistics": 71.68
423
+ },
424
+ "By Year": {
425
+ "2020": 67.91,
426
+ "2021": 67.62,
427
+ "2022": 69.57,
428
+ "2023": 69.46
429
+ },
430
+ "N_valid": 4000,
431
+ "N_invalid": 0,
432
+ "Question Type": "Descriptive"
433
+ },
434
+ "reasoning": {
435
+ "Overall Score": 32.1,
436
+ "By Answer Type": {
437
+ "Text-in-Chart": 38.18,
438
+ "Text-in-General": 37.37,
439
+ "Number-in-Chart": 35.78,
440
+ "Number-in-General": 14.41
441
+ },
442
+ "By Source": {
443
+ "GPT-Sourced": 41.85,
444
+ "GPT-Inspired": 29.63,
445
+ "Completely Human": 30.0
446
+ },
447
+ "By Subject": {
448
+ "Computer Science": 32.54,
449
+ "Economics": 34.06,
450
+ "Electrical Engineering and Systems Science": 31.93,
451
+ "Mathematics": 32.59,
452
+ "Physics": 37.8,
453
+ "Quantitative Biology": 30.16,
454
+ "Quantitative Finance": 25.86,
455
+ "Statistics": 30.97
456
+ },
457
+ "By Year": {
458
+ "2020": 29.96,
459
+ "2021": 33.33,
460
+ "2022": 31.15,
461
+ "2023": 33.87
462
+ },
463
+ "By Subplot": {
464
+ "1 Subplot": 32.9,
465
+ "2-4 Subplots": 31.75,
466
+ "5+ Subplots": 31.36
467
+ },
468
+ "N_valid": 1000,
469
+ "N_invalid": 0,
470
+ "Question Type": "Reasoning"
471
+ },
472
+ "accuracy": 50.36,
473
+ "acc_stderr": 0,
474
+ "acc": 50.36
475
+ },
476
+ "MathVision": {
477
+ "accuracy": 17.53,
478
+ "acc_stderr": 0,
479
+ "acc": 17.53
480
+ },
481
+ "CII-Bench": {
482
+ "accuracy": 53.2,
483
+ "domain_score": {
484
+ "Life": 46.75,
485
+ "Art": 55.88,
486
+ "CTC": 48.89,
487
+ "Society": 58.38,
488
+ "Env.": 64.81,
489
+ "Politics": 58.33
490
+ },
491
+ "emotion_score": {
492
+ "Neutral": 55.26,
493
+ "Negative": 55.47,
494
+ "Positive": 48.29
495
+ },
496
+ "acc_stderr": 0,
497
+ "acc": 53.2
498
+ },
499
+ "Blink": {
500
+ "accuracy": 50.34,
501
+ "Art Style": 49.57,
502
+ "Counting": 71.67,
503
+ "Forensic Detection": 30.3,
504
+ "Functional Correspondence": 22.31,
505
+ "IQ Test": 21.33,
506
+ "Jigsaw": 62.67,
507
+ "Multi-view Reasoning": 55.64,
508
+ "Object Localization": 53.28,
509
+ "Relative Depth": 70.97,
510
+ "Relative Reflectance": 35.82,
511
+ "Semantic Correspondence": 27.34,
512
+ "Spatial Relation": 78.32,
513
+ "Visual Correspondence": 39.53,
514
+ "Visual Similarity": 92.59,
515
+ "acc_stderr": 0,
516
+ "acc": 50.34
517
+ }
518
+ }
519
+ }
InternVL3-78B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,717 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "InternVL3-78B",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 66,
12
+ "accuracy": 75.0
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 476,
17
+ "accuracy": 52.89
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 42,
22
+ "accuracy": 33.33
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 97,
27
+ "accuracy": 47.55
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 89,
32
+ "accuracy": 58.17
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 57,
37
+ "accuracy": 67.06
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 125,
42
+ "accuracy": 51.23
43
+ },
44
+ "accuracy": 52.89,
45
+ "acc_stderr": 0,
46
+ "acc": 52.89
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 66.44,
50
+ "subject_score": {
51
+ "Accounting": 60.0,
52
+ "Agriculture": 66.67,
53
+ "Architecture": 50.0,
54
+ "Art": 81.67,
55
+ "Basic": 73.33,
56
+ "Biology": 56.67,
57
+ "Chemistry": 50.0,
58
+ "Clinical": 70.0,
59
+ "Computer": 60.0,
60
+ "Design": 83.33,
61
+ "Diagnostics": 50.0,
62
+ "Economics": 73.33,
63
+ "Electronics": 46.67,
64
+ "Energy": 60.0,
65
+ "Finance": 50.0,
66
+ "Geography": 76.67,
67
+ "History": 76.67,
68
+ "Literature": 83.33,
69
+ "Manage": 56.67,
70
+ "Marketing": 90.0,
71
+ "Materials": 46.67,
72
+ "Math": 50.0,
73
+ "Mechanical": 56.67,
74
+ "Music": 56.67,
75
+ "Pharmacy": 70.0,
76
+ "Physics": 73.33,
77
+ "Psychology": 80.0,
78
+ "Public": 86.67,
79
+ "Sociology": 76.67
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 66.75,
83
+ "Easy": 75.59,
84
+ "Hard": 50.83
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 66.44
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 49.65,
91
+ "subject_score": {
92
+ "History": 73.21,
93
+ "Literature": 78.85,
94
+ "Finance": 53.33,
95
+ "Design": 68.33,
96
+ "Sociology": 62.96,
97
+ "Agriculture": 48.33,
98
+ "Art": 69.81,
99
+ "Accounting": 36.21,
100
+ "Clinical_Medicine": 47.46,
101
+ "Energy_and_Power": 22.41,
102
+ "Pharmacy": 57.89,
103
+ "Architecture_and_Engineering": 26.67,
104
+ "Public_Health": 46.55,
105
+ "Electronics": 60.0,
106
+ "Physics": 46.67,
107
+ "Art_Theory": 80.0,
108
+ "Psychology": 50.0,
109
+ "Manage": 42.0,
110
+ "Economics": 42.37,
111
+ "Biology": 47.46,
112
+ "Mechanical_Engineering": 37.29,
113
+ "Diagnostics_and_Laboratory_Medicine": 40.0,
114
+ "Basic_Medical_Science": 48.08,
115
+ "Computer_Science": 55.0,
116
+ "Math": 43.33,
117
+ "Music": 31.67,
118
+ "Materials": 26.67,
119
+ "Marketing": 55.93,
120
+ "Chemistry": 45.0,
121
+ "Geography": 55.77
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 48.06,
125
+ "Easy": 60.8,
126
+ "Hard": 38.15
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 49.65
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 39.13,
133
+ "subject_score": {
134
+ "Sociology": 50.0,
135
+ "Literature": 57.69,
136
+ "History": 39.29,
137
+ "Design": 56.67,
138
+ "Art": 54.72,
139
+ "Clinical_Medicine": 33.9,
140
+ "Architecture_and_Engineering": 21.67,
141
+ "Public_Health": 31.03,
142
+ "Agriculture": 28.33,
143
+ "Art_Theory": 58.18,
144
+ "Pharmacy": 42.11,
145
+ "Psychology": 31.67,
146
+ "Biology": 38.98,
147
+ "Manage": 30.0,
148
+ "Economics": 35.59,
149
+ "Physics": 40.0,
150
+ "Accounting": 48.28,
151
+ "Diagnostics_and_Laboratory_Medicine": 31.67,
152
+ "Mechanical_Engineering": 23.73,
153
+ "Basic_Medical_Science": 46.15,
154
+ "Finance": 46.67,
155
+ "Electronics": 48.33,
156
+ "Computer_Science": 40.0,
157
+ "Math": 38.33,
158
+ "Energy_and_Power": 25.86,
159
+ "Music": 28.33,
160
+ "Marketing": 40.68,
161
+ "Materials": 26.67,
162
+ "Chemistry": 41.67,
163
+ "Geography": 44.23
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 39.13
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 69.0716,
170
+ "capability_scores": {
171
+ "ocr": 76.87500000000004,
172
+ "math": 74.11764705882354,
173
+ "spat": 66.75126903553297,
174
+ "rec": 66.35922330097094,
175
+ "know": 66.15384615384613,
176
+ "gen": 69.3454545454546,
177
+ "seq": 61.78571428571429
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 57.27272727272727,
181
+ "spat_ocr_math": 90.0,
182
+ "rec_spat_ocr_math": 100.0,
183
+ "rec_spat": 54.642857142857146,
184
+ "spat_ocr": 80.0,
185
+ "rec_spat_ocr": 75.0,
186
+ "spat_ocr_know": 100.0,
187
+ "rec_ocr": 75.0,
188
+ "rec_spat_know": 39.0,
189
+ "ocr": 86.875,
190
+ "rec": 67.45762711864406,
191
+ "rec_know": 53.84615384615385,
192
+ "rec_know_gen": 69.89999999999998,
193
+ "rec_ocr_know_gen": 77.6923076923077,
194
+ "rec_spat_ocr_gen": 75.34883720930232,
195
+ "spat_ocr_gen": 90.0,
196
+ "spat_ocr_math_seq_gen": 40.0,
197
+ "spat_ocr_rec_math_seq": 0.0,
198
+ "rec_spat_gen": 50.0,
199
+ "spat_ocr_math_gen": 50.0,
200
+ "rec_seq_spat": 54.285714285714285,
201
+ "rec_seq_spat_ocr": 60.0,
202
+ "rec_spat_know_gen": 63.33333333333334,
203
+ "rec_gen": 72.3529411764706,
204
+ "rec_spat_ocr_know": 25.0,
205
+ "spat_ocr_rec_gen_know": 65.0,
206
+ "rec_ocr_math": 100.0,
207
+ "rec_ocr_gen": 90.0,
208
+ "rec_seq_ocr_gen": 80.0,
209
+ "ocr_gen": 66.15384615384615,
210
+ "rec_seq_gen": 60.0,
211
+ "rec_seq": 65.0,
212
+ "rec_seq_spat_gen": 68.75,
213
+ "rec_seq_know": 0.0,
214
+ "rec_seq_know_gen": 35.0,
215
+ "spat_ocr_rec_seq_gen": 53.333333333333336,
216
+ "ocr_rec_seq_gen_know": 95.0,
217
+ "rec_math_know": 75.0,
218
+ "rec_seq_ocr": 100.0
219
+ },
220
+ "acc_stderr": 0,
221
+ "acc": 69.0716
222
+ },
223
+ "MathVerse": {
224
+ "Text Dominant": {
225
+ "accuracy": 54.31,
226
+ "correct": 428,
227
+ "total": 788
228
+ },
229
+ "Total": {
230
+ "accuracy": 47.56,
231
+ "correct": 1874,
232
+ "total": 3940
233
+ },
234
+ "Vision Dominant": {
235
+ "accuracy": 48.48,
236
+ "correct": 382,
237
+ "total": 788
238
+ },
239
+ "Vision Intensive": {
240
+ "accuracy": 48.73,
241
+ "correct": 384,
242
+ "total": 788
243
+ },
244
+ "Text Lite": {
245
+ "accuracy": 50.38,
246
+ "correct": 397,
247
+ "total": 788
248
+ },
249
+ "Vision Only": {
250
+ "accuracy": 35.91,
251
+ "correct": 283,
252
+ "total": 788
253
+ },
254
+ "accuracy": 47.56,
255
+ "acc_stderr": 0,
256
+ "acc": 47.56
257
+ },
258
+ "Ocrlite": {
259
+ "final_score": [
260
+ 1195,
261
+ 1644
262
+ ],
263
+ "accuracy": 72.689,
264
+ "Key Information Extraction-Bookshelf": [
265
+ 31,
266
+ 51,
267
+ 0.608,
268
+ {
269
+ "Default": [
270
+ 31,
271
+ 51,
272
+ 0.608
273
+ ]
274
+ }
275
+ ],
276
+ "Scene Text-centric VQA-diet_constraints": [
277
+ 61,
278
+ 90,
279
+ 0.678,
280
+ {
281
+ "Default": [
282
+ 61,
283
+ 90,
284
+ 0.678
285
+ ]
286
+ }
287
+ ],
288
+ "Doc-oriented VQA-Control": [
289
+ 129,
290
+ 189,
291
+ 0.683,
292
+ {
293
+ "Default": [
294
+ 129,
295
+ 189,
296
+ 0.683
297
+ ]
298
+ }
299
+ ],
300
+ "Doc-oriented VQA": [
301
+ 152,
302
+ 204,
303
+ 0.745,
304
+ {
305
+ "Default": [
306
+ 152,
307
+ 204,
308
+ 0.745
309
+ ]
310
+ }
311
+ ],
312
+ "Scene Text-centric VQA-Fake_logo": [
313
+ 64,
314
+ 119,
315
+ 0.538,
316
+ {
317
+ "Default": [
318
+ 64,
319
+ 119,
320
+ 0.538
321
+ ]
322
+ }
323
+ ],
324
+ "Handwritten Mathematical Expression Recognition": [
325
+ 1,
326
+ 100,
327
+ 0.01,
328
+ {
329
+ "Default": [
330
+ 1,
331
+ 100,
332
+ 0.01
333
+ ]
334
+ }
335
+ ],
336
+ "Key Information Extraction": [
337
+ 184,
338
+ 209,
339
+ 0.88,
340
+ {
341
+ "Default": [
342
+ 184,
343
+ 209,
344
+ 0.88
345
+ ]
346
+ }
347
+ ],
348
+ "Scene Text-centric VQA-Control": [
349
+ 166,
350
+ 200,
351
+ 0.83,
352
+ {
353
+ "Default": [
354
+ 166,
355
+ 200,
356
+ 0.83
357
+ ]
358
+ }
359
+ ],
360
+ "Scene Text-centric VQA": [
361
+ 227,
362
+ 282,
363
+ 0.805,
364
+ {
365
+ "Default": [
366
+ 227,
367
+ 282,
368
+ 0.805
369
+ ]
370
+ }
371
+ ],
372
+ "Artistic Text Recognition": [
373
+ 42,
374
+ 50,
375
+ 0.84,
376
+ {
377
+ "Default": [
378
+ 42,
379
+ 50,
380
+ 0.84
381
+ ]
382
+ }
383
+ ],
384
+ "Irregular Text Recognition": [
385
+ 46,
386
+ 50,
387
+ 0.92,
388
+ {
389
+ "Default": [
390
+ 46,
391
+ 50,
392
+ 0.92
393
+ ]
394
+ }
395
+ ],
396
+ "Non-Semantic Text Recognition": [
397
+ 42,
398
+ 50,
399
+ 0.84,
400
+ {
401
+ "Default": [
402
+ 42,
403
+ 50,
404
+ 0.84
405
+ ]
406
+ }
407
+ ],
408
+ "Regular Text Recognition": [
409
+ 50,
410
+ 50,
411
+ 1.0,
412
+ {
413
+ "Default": [
414
+ 50,
415
+ 50,
416
+ 1.0
417
+ ]
418
+ }
419
+ ],
420
+ "acc_stderr": 0,
421
+ "acc": 72.689
422
+ },
423
+ "OcrliteZh": {
424
+ "final_score": [
425
+ 153,
426
+ 234
427
+ ],
428
+ "accuracy": 65.385,
429
+ "Docvqa": [
430
+ 4,
431
+ 10,
432
+ 0.4,
433
+ {
434
+ "Default": [
435
+ 4,
436
+ 10,
437
+ 0.4
438
+ ]
439
+ }
440
+ ],
441
+ "Chartqa-human": [
442
+ 4,
443
+ 10,
444
+ 0.4,
445
+ {
446
+ "Default": [
447
+ 4,
448
+ 10,
449
+ 0.4
450
+ ]
451
+ }
452
+ ],
453
+ "Chartqa-au": [
454
+ 6,
455
+ 10,
456
+ 0.6,
457
+ {
458
+ "Default": [
459
+ 6,
460
+ 10,
461
+ 0.6
462
+ ]
463
+ }
464
+ ],
465
+ "infographic": [
466
+ 6,
467
+ 10,
468
+ 0.6,
469
+ {
470
+ "Default": [
471
+ 6,
472
+ 10,
473
+ 0.6
474
+ ]
475
+ }
476
+ ],
477
+ "Key Information Extraction": [
478
+ 35,
479
+ 45,
480
+ 0.778,
481
+ {
482
+ "Default": [
483
+ 35,
484
+ 45,
485
+ 0.778
486
+ ]
487
+ }
488
+ ],
489
+ "Scene Text-centric VQA": [
490
+ 27,
491
+ 40,
492
+ 0.675,
493
+ {
494
+ "Default": [
495
+ 27,
496
+ 40,
497
+ 0.675
498
+ ]
499
+ }
500
+ ],
501
+ "Artistic Text Recognition": [
502
+ 5,
503
+ 11,
504
+ 0.455,
505
+ {
506
+ "Default": [
507
+ 5,
508
+ 11,
509
+ 0.455
510
+ ]
511
+ }
512
+ ],
513
+ "IrRegular Text Recognition": [
514
+ 6,
515
+ 11,
516
+ 0.545,
517
+ {
518
+ "Default": [
519
+ 6,
520
+ 11,
521
+ 0.545
522
+ ]
523
+ }
524
+ ],
525
+ "Non-semantic Text Recognition": [
526
+ 11,
527
+ 12,
528
+ 0.917,
529
+ {
530
+ "Default": [
531
+ 11,
532
+ 12,
533
+ 0.917
534
+ ]
535
+ }
536
+ ],
537
+ "Regular Text Recognition": [
538
+ 10,
539
+ 11,
540
+ 0.909,
541
+ {
542
+ "Default": [
543
+ 10,
544
+ 11,
545
+ 0.909
546
+ ]
547
+ }
548
+ ],
549
+ "Handwriting_CN": [
550
+ 16,
551
+ 20,
552
+ 0.8,
553
+ {
554
+ "Default": [
555
+ 16,
556
+ 20,
557
+ 0.8
558
+ ]
559
+ }
560
+ ],
561
+ "Chinese Unlimited": [
562
+ 23,
563
+ 44,
564
+ 0.523,
565
+ {
566
+ "Default": [
567
+ 23,
568
+ 44,
569
+ 0.523
570
+ ]
571
+ }
572
+ ],
573
+ "acc_stderr": 0,
574
+ "acc": 65.385
575
+ },
576
+ "CharXiv": {
577
+ "descriptive": {
578
+ "Overall Score": 83.85,
579
+ "By Question": {
580
+ "Q1": 87.3,
581
+ "Q2": 83.91,
582
+ "Q3": 66.95,
583
+ "Q4": 85.21,
584
+ "Q5": 87.03,
585
+ "Q6": 85.54,
586
+ "Q7": 86.32,
587
+ "Q8": 88.84,
588
+ "Q9": 83.08,
589
+ "Q10": 74.66,
590
+ "Q11": 80.0,
591
+ "Q12": 79.12,
592
+ "Q13": 79.0,
593
+ "Q14": 93.26,
594
+ "Q15": 97.44,
595
+ "Q16": 77.78,
596
+ "Q17": 64.29,
597
+ "Q18": 90.28,
598
+ "Q19": 84.62
599
+ },
600
+ "By Category": {
601
+ "Information Extraction": 83.27,
602
+ "Enumeration": 89.35,
603
+ "Pattern Recognition": 85.37,
604
+ "Counting": 78.37,
605
+ "Compositionality": 64.29
606
+ },
607
+ "By Subplot": {
608
+ "1 Subplot": 87.05,
609
+ "2-4 Subplots": 86.38,
610
+ "5+ Subplots": 74.58
611
+ },
612
+ "By Subject": {
613
+ "Computer Science": 84.13,
614
+ "Economics": 83.88,
615
+ "Electrical Engineering and Systems Science": 87.39,
616
+ "Mathematics": 82.96,
617
+ "Physics": 80.51,
618
+ "Quantitative Biology": 81.55,
619
+ "Quantitative Finance": 86.21,
620
+ "Statistics": 84.73
621
+ },
622
+ "By Year": {
623
+ "2020": 82.49,
624
+ "2021": 81.42,
625
+ "2022": 85.86,
626
+ "2023": 85.79
627
+ },
628
+ "N_valid": 4000,
629
+ "N_invalid": 5,
630
+ "Question Type": "Descriptive"
631
+ },
632
+ "reasoning": {
633
+ "Overall Score": 41.4,
634
+ "By Answer Type": {
635
+ "Text-in-Chart": 45.91,
636
+ "Text-in-General": 42.42,
637
+ "Number-in-Chart": 43.53,
638
+ "Number-in-General": 30.13
639
+ },
640
+ "By Source": {
641
+ "GPT-Sourced": 53.8,
642
+ "GPT-Inspired": 39.81,
643
+ "Completely Human": 38.17
644
+ },
645
+ "By Subject": {
646
+ "Computer Science": 38.1,
647
+ "Economics": 46.38,
648
+ "Electrical Engineering and Systems Science": 33.61,
649
+ "Mathematics": 45.93,
650
+ "Physics": 48.82,
651
+ "Quantitative Biology": 46.83,
652
+ "Quantitative Finance": 33.62,
653
+ "Statistics": 35.4
654
+ },
655
+ "By Year": {
656
+ "2020": 38.87,
657
+ "2021": 45.98,
658
+ "2022": 40.98,
659
+ "2023": 39.52
660
+ },
661
+ "By Subplot": {
662
+ "1 Subplot": 41.97,
663
+ "2-4 Subplots": 41.01,
664
+ "5+ Subplots": 41.1
665
+ },
666
+ "N_valid": 1000,
667
+ "N_invalid": 0,
668
+ "Question Type": "Reasoning"
669
+ },
670
+ "accuracy": 62.62,
671
+ "acc_stderr": 0,
672
+ "acc": 62.62
673
+ },
674
+ "MathVision": {
675
+ "accuracy": 39.97,
676
+ "acc_stderr": 0,
677
+ "acc": 39.97
678
+ },
679
+ "CII-Bench": {
680
+ "accuracy": 67.32,
681
+ "domain_score": {
682
+ "CTC": 65.19,
683
+ "Env.": 72.22,
684
+ "Art": 61.03,
685
+ "Society": 70.27,
686
+ "Life": 68.4,
687
+ "Politics": 70.83
688
+ },
689
+ "emotion_score": {
690
+ "Positive": 70.09,
691
+ "Negative": 64.91,
692
+ "Neutral": 67.29
693
+ },
694
+ "acc_stderr": 0,
695
+ "acc": 67.32
696
+ },
697
+ "Blink": {
698
+ "accuracy": 61.13,
699
+ "Art Style": 85.47,
700
+ "Counting": 79.17,
701
+ "Forensic Detection": 45.45,
702
+ "Functional Correspondence": 37.69,
703
+ "IQ Test": 27.33,
704
+ "Jigsaw": 51.33,
705
+ "Multi-view Reasoning": 54.14,
706
+ "Object Localization": 53.28,
707
+ "Relative Depth": 83.06,
708
+ "Relative Reflectance": 32.09,
709
+ "Semantic Correspondence": 52.52,
710
+ "Spatial Relation": 91.61,
711
+ "Visual Correspondence": 82.56,
712
+ "Visual Similarity": 82.22,
713
+ "acc_stderr": 0,
714
+ "acc": 61.13
715
+ }
716
+ }
717
+ }
InternVL3-8B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,718 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "InternVL3-8B",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 63,
12
+ "accuracy": 71.59
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 427,
17
+ "accuracy": 47.44
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 49,
22
+ "accuracy": 38.89
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 86,
27
+ "accuracy": 42.16
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 80,
32
+ "accuracy": 52.29
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 46,
37
+ "accuracy": 54.12
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 103,
42
+ "accuracy": 42.21
43
+ },
44
+ "accuracy": 47.44,
45
+ "acc_stderr": 0,
46
+ "acc": 47.44
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 55.0,
50
+ "subject_score": {
51
+ "Accounting": 46.67,
52
+ "Agriculture": 56.67,
53
+ "Architecture": 40.0,
54
+ "Art": 80.0,
55
+ "Basic": 56.67,
56
+ "Biology": 53.33,
57
+ "Chemistry": 36.67,
58
+ "Clinical": 63.33,
59
+ "Computer": 53.33,
60
+ "Design": 76.67,
61
+ "Diagnostics": 46.67,
62
+ "Economics": 50.0,
63
+ "Electronics": 33.33,
64
+ "Energy": 40.0,
65
+ "Finance": 30.0,
66
+ "Geography": 53.33,
67
+ "History": 76.67,
68
+ "Literature": 86.67,
69
+ "Manage": 50.0,
70
+ "Marketing": 60.0,
71
+ "Materials": 50.0,
72
+ "Math": 43.33,
73
+ "Mechanical": 30.0,
74
+ "Music": 36.67,
75
+ "Pharmacy": 56.67,
76
+ "Physics": 46.67,
77
+ "Psychology": 73.33,
78
+ "Public": 76.67,
79
+ "Sociology": 66.67
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 54.48,
83
+ "Hard": 35.91,
84
+ "Easy": 67.46
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 55.0
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 39.02,
91
+ "subject_score": {
92
+ "History": 55.36,
93
+ "Design": 60.0,
94
+ "Finance": 25.0,
95
+ "Literature": 75.0,
96
+ "Sociology": 55.56,
97
+ "Clinical_Medicine": 38.98,
98
+ "Architecture_and_Engineering": 31.67,
99
+ "Accounting": 18.97,
100
+ "Pharmacy": 47.37,
101
+ "Energy_and_Power": 17.24,
102
+ "Art": 58.49,
103
+ "Agriculture": 33.33,
104
+ "Physics": 36.67,
105
+ "Public_Health": 36.21,
106
+ "Art_Theory": 67.27,
107
+ "Electronics": 61.67,
108
+ "Manage": 42.0,
109
+ "Psychology": 38.33,
110
+ "Biology": 37.29,
111
+ "Mechanical_Engineering": 25.42,
112
+ "Economics": 27.12,
113
+ "Basic_Medical_Science": 36.54,
114
+ "Diagnostics_and_Laboratory_Medicine": 33.33,
115
+ "Computer_Science": 26.67,
116
+ "Math": 26.67,
117
+ "Materials": 16.67,
118
+ "Music": 33.33,
119
+ "Marketing": 37.29,
120
+ "Chemistry": 36.67,
121
+ "Geography": 46.15
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 36.58,
125
+ "Hard": 29.93,
126
+ "Easy": 49.62
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 39.02
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 29.77,
133
+ "subject_score": {
134
+ "History": 46.43,
135
+ "Design": 45.0,
136
+ "Art": 47.17,
137
+ "Sociology": 40.74,
138
+ "Agriculture": 18.33,
139
+ "Energy_and_Power": 20.69,
140
+ "Accounting": 24.14,
141
+ "Literature": 67.31,
142
+ "Finance": 23.33,
143
+ "Pharmacy": 33.33,
144
+ "Clinical_Medicine": 20.34,
145
+ "Architecture_and_Engineering": 18.33,
146
+ "Public_Health": 20.69,
147
+ "Art_Theory": 50.91,
148
+ "Psychology": 21.67,
149
+ "Physics": 21.67,
150
+ "Electronics": 35.0,
151
+ "Biology": 25.42,
152
+ "Mechanical_Engineering": 22.03,
153
+ "Manage": 32.0,
154
+ "Economics": 27.12,
155
+ "Diagnostics_and_Laboratory_Medicine": 31.67,
156
+ "Basic_Medical_Science": 36.54,
157
+ "Computer_Science": 36.67,
158
+ "Math": 21.67,
159
+ "Materials": 16.67,
160
+ "Music": 25.0,
161
+ "Marketing": 22.03,
162
+ "Chemistry": 25.0,
163
+ "Geography": 26.92
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 29.77
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 64.6228,
170
+ "capability_scores": {
171
+ "ocr": 71.39423076923079,
172
+ "math": 71.47058823529412,
173
+ "spat": 62.690355329949234,
174
+ "rec": 61.04368932038846,
175
+ "know": 58.46153846153845,
176
+ "gen": 61.600000000000065,
177
+ "seq": 60.0
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 63.63636363636363,
181
+ "ocr_math_spat": 85.33333333333334,
182
+ "ocr_math_rec_spat": 100.0,
183
+ "rec_spat": 66.07142857142857,
184
+ "ocr_spat": 81.92307692307692,
185
+ "ocr_rec_spat": 60.83333333333333,
186
+ "ocr_know_spat": 100.0,
187
+ "ocr_rec": 75.0,
188
+ "know_rec_spat": 34.0,
189
+ "ocr": 85.3125,
190
+ "rec": 68.30508474576268,
191
+ "know_rec": 53.07692307692308,
192
+ "know_rec_gen": 63.299999999999976,
193
+ "ocr_know_rec_gen": 74.6153846153846,
194
+ "ocr_spat_rec_gen": 60.93023255813952,
195
+ "ocr_gen_spat": 85.00000000000001,
196
+ "ocr_seq_gen_spat_math": 100.0,
197
+ "ocr_rec_seq_spat_math": 0.0,
198
+ "spat_rec_gen": 49.0909090909091,
199
+ "ocr_math_gen_spat": 50.0,
200
+ "gen_rec_spat": 49.0909090909091,
201
+ "seq_rec_spat": 42.857142857142854,
202
+ "ocr_seq_rec_spat": 60.0,
203
+ "know_spat_rec_gen": 10.0,
204
+ "rec_gen": 62.35294117647059,
205
+ "ocr_know_rec_spat": 12.5,
206
+ "ocr_rec_gen_spat_know": 65.0,
207
+ "ocr_math_rec": 100.0,
208
+ "ocr_rec_gen": 64.0,
209
+ "ocr_seq_rec_gen": 80.0,
210
+ "ocr_gen": 53.84615384615385,
211
+ "seq_rec_gen": 62.14285714285713,
212
+ "seq_rec": 60.0,
213
+ "spat_seq_rec_gen": 70.0,
214
+ "know_seq_rec": 0.0,
215
+ "know_seq_rec_gen": 0.0,
216
+ "ocr_rec_seq_gen_spat": 50.0,
217
+ "ocr_rec_seq_gen_know": 90.0,
218
+ "know_math_rec": 0.0,
219
+ "ocr_seq_rec": 100.0
220
+ },
221
+ "acc_stderr": 0,
222
+ "acc": 64.6228
223
+ },
224
+ "MathVerse": {
225
+ "Vision Dominant": {
226
+ "accuracy": 40.99,
227
+ "correct": 323,
228
+ "total": 788
229
+ },
230
+ "Total": {
231
+ "accuracy": 41.37,
232
+ "correct": 1630,
233
+ "total": 3940
234
+ },
235
+ "Text Dominant": {
236
+ "accuracy": 50.25,
237
+ "correct": 396,
238
+ "total": 788
239
+ },
240
+ "Text Lite": {
241
+ "accuracy": 43.53,
242
+ "correct": 343,
243
+ "total": 788
244
+ },
245
+ "Vision Intensive": {
246
+ "accuracy": 41.75,
247
+ "correct": 329,
248
+ "total": 788
249
+ },
250
+ "Vision Only": {
251
+ "accuracy": 30.33,
252
+ "correct": 239,
253
+ "total": 788
254
+ },
255
+ "accuracy": 41.37,
256
+ "acc_stderr": 0,
257
+ "acc": 41.37
258
+ },
259
+ "Ocrlite": {
260
+ "final_score": [
261
+ 1184,
262
+ 1644
263
+ ],
264
+ "accuracy": 72.019,
265
+ "Key Information Extraction-Bookshelf": [
266
+ 25,
267
+ 51,
268
+ 0.49,
269
+ {
270
+ "Default": [
271
+ 25,
272
+ 51,
273
+ 0.49
274
+ ]
275
+ }
276
+ ],
277
+ "Scene Text-centric VQA-diet_constraints": [
278
+ 74,
279
+ 90,
280
+ 0.822,
281
+ {
282
+ "Default": [
283
+ 74,
284
+ 90,
285
+ 0.822
286
+ ]
287
+ }
288
+ ],
289
+ "Doc-oriented VQA-Control": [
290
+ 123,
291
+ 189,
292
+ 0.651,
293
+ {
294
+ "Default": [
295
+ 123,
296
+ 189,
297
+ 0.651
298
+ ]
299
+ }
300
+ ],
301
+ "Doc-oriented VQA": [
302
+ 152,
303
+ 204,
304
+ 0.745,
305
+ {
306
+ "Default": [
307
+ 152,
308
+ 204,
309
+ 0.745
310
+ ]
311
+ }
312
+ ],
313
+ "Scene Text-centric VQA-Fake_logo": [
314
+ 51,
315
+ 119,
316
+ 0.429,
317
+ {
318
+ "Default": [
319
+ 51,
320
+ 119,
321
+ 0.429
322
+ ]
323
+ }
324
+ ],
325
+ "Handwritten Mathematical Expression Recognition": [
326
+ 33,
327
+ 100,
328
+ 0.33,
329
+ {
330
+ "Default": [
331
+ 33,
332
+ 100,
333
+ 0.33
334
+ ]
335
+ }
336
+ ],
337
+ "Key Information Extraction": [
338
+ 176,
339
+ 209,
340
+ 0.842,
341
+ {
342
+ "Default": [
343
+ 176,
344
+ 209,
345
+ 0.842
346
+ ]
347
+ }
348
+ ],
349
+ "Scene Text-centric VQA-Control": [
350
+ 162,
351
+ 200,
352
+ 0.81,
353
+ {
354
+ "Default": [
355
+ 162,
356
+ 200,
357
+ 0.81
358
+ ]
359
+ }
360
+ ],
361
+ "Scene Text-centric VQA": [
362
+ 214,
363
+ 282,
364
+ 0.759,
365
+ {
366
+ "Default": [
367
+ 214,
368
+ 282,
369
+ 0.759
370
+ ]
371
+ }
372
+ ],
373
+ "Artistic Text Recognition": [
374
+ 38,
375
+ 50,
376
+ 0.76,
377
+ {
378
+ "Default": [
379
+ 38,
380
+ 50,
381
+ 0.76
382
+ ]
383
+ }
384
+ ],
385
+ "Irregular Text Recognition": [
386
+ 42,
387
+ 50,
388
+ 0.84,
389
+ {
390
+ "Default": [
391
+ 42,
392
+ 50,
393
+ 0.84
394
+ ]
395
+ }
396
+ ],
397
+ "Non-Semantic Text Recognition": [
398
+ 46,
399
+ 50,
400
+ 0.92,
401
+ {
402
+ "Default": [
403
+ 46,
404
+ 50,
405
+ 0.92
406
+ ]
407
+ }
408
+ ],
409
+ "Regular Text Recognition": [
410
+ 48,
411
+ 50,
412
+ 0.96,
413
+ {
414
+ "Default": [
415
+ 48,
416
+ 50,
417
+ 0.96
418
+ ]
419
+ }
420
+ ],
421
+ "acc_stderr": 0,
422
+ "acc": 72.019
423
+ },
424
+ "OcrliteZh": {
425
+ "final_score": [
426
+ 140,
427
+ 234
428
+ ],
429
+ "accuracy": 59.829,
430
+ "Docvqa": [
431
+ 4,
432
+ 10,
433
+ 0.4,
434
+ {
435
+ "Default": [
436
+ 4,
437
+ 10,
438
+ 0.4
439
+ ]
440
+ }
441
+ ],
442
+ "Chartqa-human": [
443
+ 3,
444
+ 10,
445
+ 0.3,
446
+ {
447
+ "Default": [
448
+ 3,
449
+ 10,
450
+ 0.3
451
+ ]
452
+ }
453
+ ],
454
+ "Chartqa-au": [
455
+ 6,
456
+ 10,
457
+ 0.6,
458
+ {
459
+ "Default": [
460
+ 6,
461
+ 10,
462
+ 0.6
463
+ ]
464
+ }
465
+ ],
466
+ "infographic": [
467
+ 4,
468
+ 10,
469
+ 0.4,
470
+ {
471
+ "Default": [
472
+ 4,
473
+ 10,
474
+ 0.4
475
+ ]
476
+ }
477
+ ],
478
+ "Key Information Extraction": [
479
+ 36,
480
+ 45,
481
+ 0.8,
482
+ {
483
+ "Default": [
484
+ 36,
485
+ 45,
486
+ 0.8
487
+ ]
488
+ }
489
+ ],
490
+ "Scene Text-centric VQA": [
491
+ 24,
492
+ 40,
493
+ 0.6,
494
+ {
495
+ "Default": [
496
+ 24,
497
+ 40,
498
+ 0.6
499
+ ]
500
+ }
501
+ ],
502
+ "Artistic Text Recognition": [
503
+ 4,
504
+ 11,
505
+ 0.364,
506
+ {
507
+ "Default": [
508
+ 4,
509
+ 11,
510
+ 0.364
511
+ ]
512
+ }
513
+ ],
514
+ "IrRegular Text Recognition": [
515
+ 9,
516
+ 11,
517
+ 0.818,
518
+ {
519
+ "Default": [
520
+ 9,
521
+ 11,
522
+ 0.818
523
+ ]
524
+ }
525
+ ],
526
+ "Non-semantic Text Recognition": [
527
+ 10,
528
+ 12,
529
+ 0.833,
530
+ {
531
+ "Default": [
532
+ 10,
533
+ 12,
534
+ 0.833
535
+ ]
536
+ }
537
+ ],
538
+ "Regular Text Recognition": [
539
+ 9,
540
+ 11,
541
+ 0.818,
542
+ {
543
+ "Default": [
544
+ 9,
545
+ 11,
546
+ 0.818
547
+ ]
548
+ }
549
+ ],
550
+ "Handwriting_CN": [
551
+ 13,
552
+ 20,
553
+ 0.65,
554
+ {
555
+ "Default": [
556
+ 13,
557
+ 20,
558
+ 0.65
559
+ ]
560
+ }
561
+ ],
562
+ "Chinese Unlimited": [
563
+ 18,
564
+ 44,
565
+ 0.409,
566
+ {
567
+ "Default": [
568
+ 18,
569
+ 44,
570
+ 0.409
571
+ ]
572
+ }
573
+ ],
574
+ "acc_stderr": 0,
575
+ "acc": 59.829
576
+ },
577
+ "CharXiv": {
578
+ "descriptive": {
579
+ "Overall Score": 72.42,
580
+ "By Question": {
581
+ "Q1": 81.97,
582
+ "Q2": 79.13,
583
+ "Q3": 63.52,
584
+ "Q4": 87.16,
585
+ "Q5": 83.68,
586
+ "Q6": 77.11,
587
+ "Q7": 70.94,
588
+ "Q8": 78.12,
589
+ "Q9": 65.67,
590
+ "Q10": 66.44,
591
+ "Q11": 68.0,
592
+ "Q12": 59.34,
593
+ "Q13": 56.16,
594
+ "Q14": 82.98,
595
+ "Q15": 81.47,
596
+ "Q16": 66.67,
597
+ "Q17": 28.57,
598
+ "Q18": 81.38,
599
+ "Q19": 81.54
600
+ },
601
+ "By Category": {
602
+ "Information Extraction": 77.82,
603
+ "Enumeration": 74.17,
604
+ "Pattern Recognition": 75.11,
605
+ "Counting": 65.65,
606
+ "Compositionality": 28.57
607
+ },
608
+ "By Subplot": {
609
+ "1 Subplot": 80.51,
610
+ "2-4 Subplots": 71.83,
611
+ "5+ Subplots": 60.17
612
+ },
613
+ "By Subject": {
614
+ "Computer Science": 68.25,
615
+ "Economics": 74.28,
616
+ "Electrical Engineering and Systems Science": 76.89,
617
+ "Mathematics": 74.63,
618
+ "Physics": 67.72,
619
+ "Quantitative Biology": 70.44,
620
+ "Quantitative Finance": 71.34,
621
+ "Statistics": 76.11
622
+ },
623
+ "By Year": {
624
+ "2020": 73.38,
625
+ "2021": 71.93,
626
+ "2022": 71.62,
627
+ "2023": 72.78
628
+ },
629
+ "N_valid": 4000,
630
+ "N_invalid": 0,
631
+ "Question Type": "Descriptive"
632
+ },
633
+ "reasoning": {
634
+ "Overall Score": 36.6,
635
+ "By Answer Type": {
636
+ "Text-in-Chart": 37.73,
637
+ "Text-in-General": 44.44,
638
+ "Number-in-Chart": 38.79,
639
+ "Number-in-General": 28.82
640
+ },
641
+ "By Source": {
642
+ "GPT-Sourced": 38.04,
643
+ "GPT-Inspired": 36.57,
644
+ "Completely Human": 36.17
645
+ },
646
+ "By Subject": {
647
+ "Computer Science": 30.16,
648
+ "Economics": 38.41,
649
+ "Electrical Engineering and Systems Science": 39.5,
650
+ "Mathematics": 38.52,
651
+ "Physics": 43.31,
652
+ "Quantitative Biology": 36.51,
653
+ "Quantitative Finance": 33.62,
654
+ "Statistics": 31.86
655
+ },
656
+ "By Year": {
657
+ "2020": 33.6,
658
+ "2021": 38.31,
659
+ "2022": 35.66,
660
+ "2023": 38.71
661
+ },
662
+ "By Subplot": {
663
+ "1 Subplot": 38.6,
664
+ "2-4 Subplots": 37.57,
665
+ "5+ Subplots": 31.78
666
+ },
667
+ "N_valid": 1000,
668
+ "N_invalid": 0,
669
+ "Question Type": "Reasoning"
670
+ },
671
+ "accuracy": 54.51,
672
+ "acc_stderr": 0,
673
+ "acc": 54.51
674
+ },
675
+ "MathVision": {
676
+ "accuracy": 30.46,
677
+ "acc_stderr": 0,
678
+ "acc": 30.46
679
+ },
680
+ "CII-Bench": {
681
+ "accuracy": 56.08,
682
+ "domain_score": {
683
+ "CTC": 49.63,
684
+ "Art": 52.94,
685
+ "Life": 52.81,
686
+ "Society": 63.24,
687
+ "Env.": 62.96,
688
+ "Politics": 70.83
689
+ },
690
+ "emotion_score": {
691
+ "Positive": 51.71,
692
+ "Negative": 59.62,
693
+ "Neutral": 56.39
694
+ },
695
+ "acc_stderr": 0,
696
+ "acc": 56.08
697
+ },
698
+ "Blink": {
699
+ "accuracy": 55.29,
700
+ "Art Style": 80.34,
701
+ "Counting": 68.33,
702
+ "Forensic Detection": 37.88,
703
+ "Functional Correspondence": 28.46,
704
+ "IQ Test": 25.33,
705
+ "Jigsaw": 64.67,
706
+ "Multi-view Reasoning": 47.37,
707
+ "Object Localization": 51.64,
708
+ "Relative Depth": 77.42,
709
+ "Relative Reflectance": 34.33,
710
+ "Semantic Correspondence": 38.85,
711
+ "Spatial Relation": 88.81,
712
+ "Visual Correspondence": 55.23,
713
+ "Visual Similarity": 80.74,
714
+ "acc_stderr": 0,
715
+ "acc": 55.29
716
+ }
717
+ }
718
+ }
Janus-1.3B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 15.09
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 10.75,
 
 
25
  "acc_stderr": 0,
26
- "acc": 10.75
27
  },
28
  "MmvetV2": {
29
  "accuracy": 27.911,
@@ -333,9 +365,9 @@
333
  "acc": 17.32
334
  },
335
  "MathVision": {
336
- "accuracy": 14.21,
337
  "acc_stderr": 0,
338
- "acc": 14.21
339
  },
340
  "CII-Bench": {
341
  "accuracy": 21.96,
 
21
  "acc": 15.09
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 11.97,
25
+ "subject_score": {
26
+ "History": 0.0,
27
+ "Art": 13.21,
28
+ "Design": 3.33,
29
+ "Literature": 9.62,
30
+ "Agriculture": 13.33,
31
+ "Finance": 11.67,
32
+ "Sociology": 12.96,
33
+ "Accounting": 20.69,
34
+ "Energy_and_Power": 13.79,
35
+ "Pharmacy": 17.54,
36
+ "Architecture_and_Engineering": 10.0,
37
+ "Clinical_Medicine": 10.17,
38
+ "Public_Health": 3.45,
39
+ "Physics": 16.67,
40
+ "Art_Theory": 5.45,
41
+ "Electronics": 5.0,
42
+ "Psychology": 10.0,
43
+ "Biology": 15.25,
44
+ "Manage": 18.0,
45
+ "Economics": 15.25,
46
+ "Mechanical_Engineering": 18.64,
47
+ "Diagnostics_and_Laboratory_Medicine": 6.67,
48
+ "Basic_Medical_Science": 9.62,
49
+ "Computer_Science": 10.0,
50
+ "Math": 15.0,
51
+ "Music": 16.67,
52
+ "Materials": 10.0,
53
+ "Marketing": 13.56,
54
+ "Chemistry": 20.0,
55
+ "Geography": 13.46
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 11.97
59
  },
60
  "MmvetV2": {
61
  "accuracy": 27.911,
 
365
  "acc": 17.32
366
  },
367
  "MathVision": {
368
+ "accuracy": 14.24,
369
  "acc_stderr": 0,
370
+ "acc": 14.24
371
  },
372
  "CII-Bench": {
373
  "accuracy": 21.96,
LLaVA-OneVision-0.5B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -129,7 +129,7 @@
129
  "acc": 17.28
130
  },
131
  "MMMU_Pro_vision": {
132
- "accuracy": 11.97,
133
  "subject_score": {
134
  "Art": 13.21,
135
  "History": 5.36,
@@ -152,18 +152,18 @@
152
  "Mechanical_Engineering": 16.95,
153
  "Economics": 3.39,
154
  "Biology": 11.86,
155
- "Diagnostics_and_Laboratory_Medicine": 10.0,
156
  "Basic_Medical_Science": 15.38,
157
  "Computer_Science": 16.67,
158
  "Math": 13.33,
159
  "Music": 31.67,
160
- "Materials": 11.67,
161
  "Marketing": 8.47,
162
  "Chemistry": 23.33,
163
  "Geography": 11.54
164
  },
165
  "acc_stderr": 0,
166
- "acc": 11.97
167
  },
168
  "MmvetV2": {
169
  "reject_info": {
@@ -479,9 +479,9 @@
479
  "acc": 22.84
480
  },
481
  "MathVision": {
482
- "accuracy": 13.29,
483
  "acc_stderr": 0,
484
- "acc": 13.29
485
  },
486
  "CII-Bench": {
487
  "accuracy": 28.5,
 
129
  "acc": 17.28
130
  },
131
  "MMMU_Pro_vision": {
132
+ "accuracy": 12.08,
133
  "subject_score": {
134
  "Art": 13.21,
135
  "History": 5.36,
 
152
  "Mechanical_Engineering": 16.95,
153
  "Economics": 3.39,
154
  "Biology": 11.86,
155
+ "Diagnostics_and_Laboratory_Medicine": 11.67,
156
  "Basic_Medical_Science": 15.38,
157
  "Computer_Science": 16.67,
158
  "Math": 13.33,
159
  "Music": 31.67,
160
+ "Materials": 13.33,
161
  "Marketing": 8.47,
162
  "Chemistry": 23.33,
163
  "Geography": 11.54
164
  },
165
  "acc_stderr": 0,
166
+ "acc": 12.08
167
  },
168
  "MmvetV2": {
169
  "reject_info": {
 
479
  "acc": 22.84
480
  },
481
  "MathVision": {
482
+ "accuracy": 13.09,
483
  "acc_stderr": 0,
484
+ "acc": 13.09
485
  },
486
  "CII-Bench": {
487
  "accuracy": 28.5,
LLaVA-OneVision-7B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 28.67
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 13.82,
 
 
25
  "acc_stderr": 0,
26
- "acc": 13.82
27
  },
28
  "MmvetV2": {
29
  "reject_info": {
 
21
  "acc": 28.67
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 11.39,
25
+ "subject_score": {
26
+ "Literature": 17.31,
27
+ "Sociology": 9.26,
28
+ "History": 10.71,
29
+ "Finance": 15.0,
30
+ "Art": 11.32,
31
+ "Design": 8.33,
32
+ "Agriculture": 8.33,
33
+ "Clinical_Medicine": 13.56,
34
+ "Accounting": 8.62,
35
+ "Energy_and_Power": 13.79,
36
+ "Architecture_and_Engineering": 5.0,
37
+ "Electronics": 6.67,
38
+ "Art_Theory": 9.09,
39
+ "Physics": 13.33,
40
+ "Public_Health": 5.17,
41
+ "Manage": 22.0,
42
+ "Biology": 13.56,
43
+ "Psychology": 16.67,
44
+ "Economics": 16.95,
45
+ "Diagnostics_and_Laboratory_Medicine": 8.33,
46
+ "Mechanical_Engineering": 5.08,
47
+ "Basic_Medical_Science": 7.69,
48
+ "Computer_Science": 11.67,
49
+ "Math": 8.33,
50
+ "Pharmacy": 15.79,
51
+ "Music": 16.67,
52
+ "Materials": 5.0,
53
+ "Marketing": 15.25,
54
+ "Chemistry": 8.33,
55
+ "Geography": 17.31
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 11.39
59
  },
60
  "MmvetV2": {
61
  "reject_info": {
LLaVA-Onevision-72B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -66,9 +66,41 @@
66
  "reject_number": 2,
67
  "total_question": 1730
68
  },
69
- "accuracy": 31.94,
 
 
70
  "acc_stderr": 0,
71
- "acc": 31.94
72
  },
73
  "MmvetV2": {
74
  "reject_info": {
@@ -397,9 +429,9 @@
397
  "acc": 52.09
398
  },
399
  "MathVision": {
400
- "accuracy": 25.1,
401
  "acc_stderr": 0,
402
- "acc": 25.1
403
  },
404
  "CII-Bench": {
405
  "accuracy": 57.78,
 
66
  "reject_number": 2,
67
  "total_question": 1730
68
  },
69
+ "accuracy": 23.9,
70
+ "subject_score": {
71
+ "History": 26.79,
72
+ "Literature": 55.77,
73
+ "Sociology": 35.19,
74
+ "Art": 28.3,
75
+ "Agriculture": 16.67,
76
+ "Design": 35.0,
77
+ "Pharmacy": 38.6,
78
+ "Energy_and_Power": 15.52,
79
+ "Architecture_and_Engineering": 11.67,
80
+ "Art_Theory": 38.18,
81
+ "Electronics": 23.33,
82
+ "Accounting": 21.43,
83
+ "Psychology": 16.67,
84
+ "Biology": 16.95,
85
+ "Manage": 20.0,
86
+ "Public_Health": 17.24,
87
+ "Economics": 28.81,
88
+ "Diagnostics_and_Laboratory_Medicine": 11.67,
89
+ "Clinical_Medicine": 18.64,
90
+ "Physics": 18.33,
91
+ "Mechanical_Engineering": 25.42,
92
+ "Finance": 26.67,
93
+ "Computer_Science": 30.0,
94
+ "Math": 20.0,
95
+ "Basic_Medical_Science": 17.31,
96
+ "Marketing": 22.03,
97
+ "Music": 25.0,
98
+ "Materials": 21.67,
99
+ "Chemistry": 16.67,
100
+ "Geography": 23.08
101
+ },
102
  "acc_stderr": 0,
103
+ "acc": 23.9
104
  },
105
  "MmvetV2": {
106
  "reject_info": {
 
429
  "acc": 52.09
430
  },
431
  "MathVision": {
432
+ "accuracy": 25.03,
433
  "acc_stderr": 0,
434
+ "acc": 25.03
435
  },
436
  "CII-Bench": {
437
  "accuracy": 57.78,
Llama-3.2-11B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 26.53
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 33.93,
 
 
25
  "acc_stderr": 0,
26
- "acc": 33.93
27
  },
28
  "MmvetV2": {
29
  "reject_info": {
@@ -337,9 +369,9 @@
337
  "acc": 40.14
338
  },
339
  "MathVision": {
340
- "accuracy": 16.71,
341
  "acc_stderr": 0,
342
- "acc": 16.71
343
  },
344
  "CII-Bench": {
345
  "accuracy": 1.44,
 
21
  "acc": 26.53
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 12.6,
25
+ "subject_score": {
26
+ "History": 8.93,
27
+ "Art": 1.89,
28
+ "Design": 3.33,
29
+ "Literature": 1.92,
30
+ "Agriculture": 18.33,
31
+ "Finance": 10.0,
32
+ "Sociology": 9.26,
33
+ "Accounting": 17.24,
34
+ "Energy_and_Power": 12.07,
35
+ "Pharmacy": 33.33,
36
+ "Architecture_and_Engineering": 11.67,
37
+ "Clinical_Medicine": 3.39,
38
+ "Public_Health": 12.07,
39
+ "Physics": 16.67,
40
+ "Art_Theory": 7.27,
41
+ "Electronics": 11.67,
42
+ "Psychology": 13.33,
43
+ "Biology": 11.86,
44
+ "Manage": 22.0,
45
+ "Economics": 10.17,
46
+ "Mechanical_Engineering": 16.95,
47
+ "Diagnostics_and_Laboratory_Medicine": 13.33,
48
+ "Basic_Medical_Science": 9.62,
49
+ "Computer_Science": 16.67,
50
+ "Math": 6.67,
51
+ "Music": 13.33,
52
+ "Materials": 11.67,
53
+ "Marketing": 10.17,
54
+ "Chemistry": 25.0,
55
+ "Geography": 17.31
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 12.6
59
  },
60
  "MmvetV2": {
61
  "reject_info": {
 
369
  "acc": 40.14
370
  },
371
  "MathVision": {
372
+ "accuracy": 15.72,
373
  "acc_stderr": 0,
374
+ "acc": 15.72
375
  },
376
  "CII-Bench": {
377
  "accuracy": 1.44,
Llama-3.2-90B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -94,9 +94,41 @@
94
  "acc": 38.09
95
  },
96
  "MMMU_Pro_vision": {
97
- "accuracy": 23.58,
 
 
98
  "acc_stderr": 0,
99
- "acc": 23.58
100
  },
101
  "MmvetV2": {
102
  "accuracy": 47.234,
@@ -408,9 +440,9 @@
408
  "acc": 49.1
409
  },
410
  "MathVision": {
411
- "accuracy": 21.51,
412
  "acc_stderr": 0,
413
- "acc": 21.51
414
  },
415
  "CII-Bench": {
416
  "accuracy": 55.82,
 
94
  "acc": 38.09
95
  },
96
  "MMMU_Pro_vision": {
97
+ "accuracy": 16.01,
98
+ "subject_score": {
99
+ "Art": 11.32,
100
+ "Design": 8.33,
101
+ "History": 10.71,
102
+ "Agriculture": 8.33,
103
+ "Literature": 17.31,
104
+ "Sociology": 11.11,
105
+ "Finance": 15.0,
106
+ "Accounting": 18.97,
107
+ "Pharmacy": 28.07,
108
+ "Architecture_and_Engineering": 28.33,
109
+ "Clinical_Medicine": 6.78,
110
+ "Public_Health": 10.34,
111
+ "Physics": 16.67,
112
+ "Art_Theory": 10.91,
113
+ "Energy_and_Power": 20.69,
114
+ "Psychology": 18.33,
115
+ "Biology": 15.25,
116
+ "Manage": 30.0,
117
+ "Economics": 20.34,
118
+ "Electronics": 15.0,
119
+ "Mechanical_Engineering": 22.03,
120
+ "Diagnostics_and_Laboratory_Medicine": 10.0,
121
+ "Basic_Medical_Science": 11.54,
122
+ "Computer_Science": 15.0,
123
+ "Math": 13.33,
124
+ "Music": 23.33,
125
+ "Marketing": 16.95,
126
+ "Materials": 10.0,
127
+ "Chemistry": 25.0,
128
+ "Geography": 11.54
129
+ },
130
  "acc_stderr": 0,
131
+ "acc": 16.01
132
  },
133
  "MmvetV2": {
134
  "accuracy": 47.234,
 
440
  "acc": 49.1
441
  },
442
  "MathVision": {
443
+ "accuracy": 21.18,
444
  "acc_stderr": 0,
445
+ "acc": 21.18
446
  },
447
  "CII-Bench": {
448
  "accuracy": 55.82,
Llama-4-maverick-instruct-basic/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,730 @@
 
 
1
+ {
2
+ "config_general": {
3
+ "model_name": "Llama-4-maverick-instruct-basic",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 65,
12
+ "accuracy": 73.86
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 515,
17
+ "accuracy": 57.22
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 52,
22
+ "accuracy": 41.27
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 111,
27
+ "accuracy": 54.41
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 89,
32
+ "accuracy": 58.17
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 48,
37
+ "accuracy": 56.47
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 150,
42
+ "accuracy": 61.48
43
+ },
44
+ "accuracy": 57.22,
45
+ "acc_stderr": 0,
46
+ "acc": 57.22
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 69.56,
50
+ "subject_score": {
51
+ "Accounting": 76.67,
52
+ "Agriculture": 60.0,
53
+ "Architecture": 63.33,
54
+ "Art": 73.33,
55
+ "Basic": 73.33,
56
+ "Biology": 40.0,
57
+ "Chemistry": 60.0,
58
+ "Clinical": 73.33,
59
+ "Computer": 63.33,
60
+ "Design": 73.33,
61
+ "Diagnostics": 56.67,
62
+ "Economics": 76.67,
63
+ "Electronics": 60.0,
64
+ "Energy": 76.67,
65
+ "Finance": 80.0,
66
+ "Geography": 66.67,
67
+ "History": 73.33,
68
+ "Literature": 86.67,
69
+ "Manage": 63.33,
70
+ "Marketing": 86.67,
71
+ "Materials": 50.0,
72
+ "Math": 63.33,
73
+ "Mechanical": 50.0,
74
+ "Music": 46.67,
75
+ "Pharmacy": 90.0,
76
+ "Physics": 73.33,
77
+ "Psychology": 76.67,
78
+ "Public": 96.67,
79
+ "Sociology": 83.33
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 68.4,
83
+ "Easy": 77.63,
84
+ "Hard": 59.12
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 69.56
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 52.95,
91
+ "subject_score": {
92
+ "History": 46.43,
93
+ "Design": 51.67,
94
+ "Art": 54.72,
95
+ "Literature": 63.46,
96
+ "Sociology": 55.56,
97
+ "Agriculture": 36.67,
98
+ "Accounting": 82.76,
99
+ "Finance": 73.33,
100
+ "Pharmacy": 57.89,
101
+ "Clinical_Medicine": 44.07,
102
+ "Energy_and_Power": 51.72,
103
+ "Architecture_and_Engineering": 41.67,
104
+ "Public_Health": 67.24,
105
+ "Physics": 45.0,
106
+ "Art_Theory": 56.36,
107
+ "Electronics": 70.0,
108
+ "Psychology": 45.0,
109
+ "Economics": 67.8,
110
+ "Manage": 50.0,
111
+ "Biology": 49.15,
112
+ "Diagnostics_and_Laboratory_Medicine": 48.33,
113
+ "Mechanical_Engineering": 47.46,
114
+ "Basic_Medical_Science": 46.15,
115
+ "Computer_Science": 53.33,
116
+ "Math": 53.33,
117
+ "Music": 20.0,
118
+ "Marketing": 69.49,
119
+ "Materials": 45.0,
120
+ "Chemistry": 56.67,
121
+ "Geography": 38.46
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 51.06,
125
+ "Hard": 47.63,
126
+ "Easy": 59.85
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 52.95
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 51.79,
133
+ "subject_score": {
134
+ "Design": 65.0,
135
+ "History": 55.36,
136
+ "Art": 47.17,
137
+ "Literature": 65.38,
138
+ "Agriculture": 31.67,
139
+ "Sociology": 48.15,
140
+ "Finance": 76.67,
141
+ "Accounting": 77.59,
142
+ "Pharmacy": 57.89,
143
+ "Energy_and_Power": 58.62,
144
+ "Clinical_Medicine": 40.68,
145
+ "Architecture_and_Engineering": 36.67,
146
+ "Public_Health": 60.34,
147
+ "Physics": 58.33,
148
+ "Art_Theory": 58.18,
149
+ "Psychology": 36.67,
150
+ "Electronics": 58.33,
151
+ "Biology": 44.07,
152
+ "Manage": 58.0,
153
+ "Economics": 71.19,
154
+ "Diagnostics_and_Laboratory_Medicine": 40.0,
155
+ "Mechanical_Engineering": 44.07,
156
+ "Basic_Medical_Science": 51.92,
157
+ "Computer_Science": 40.0,
158
+ "Math": 46.67,
159
+ "Music": 26.67,
160
+ "Materials": 43.33,
161
+ "Marketing": 67.8,
162
+ "Chemistry": 50.0,
163
+ "Geography": 40.38
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 51.79
167
+ },
168
+ "MmvetV2": {
169
+ "reject_info": {
170
+ "reject_rate": 0.39,
171
+ "reject_number": 2,
172
+ "total_question": 517
173
+ },
174
+ "accuracy": 67.4951,
175
+ "capability_scores": {
176
+ "math": 82.6470588235294,
177
+ "ocr": 78.12500000000003,
178
+ "spat": 66.93877551020405,
179
+ "rec": 62.756097560975675,
180
+ "know": 58.84615384615384,
181
+ "gen": 65.49090909090913,
182
+ "seq": 69.07407407407406
183
+ },
184
+ "capability_detail_scores": {
185
+ "math_ocr": 100.0,
186
+ "spat_ocr_math": 86.0,
187
+ "spat_rec_ocr_math": 50.0,
188
+ "rec_spat": 52.85714285714286,
189
+ "spat_ocr": 96.53846153846153,
190
+ "spat_rec_ocr": 45.83333333333333,
191
+ "spat_know_ocr": 100.0,
192
+ "rec_ocr": 87.5,
193
+ "know_rec_spat": 40.0,
194
+ "ocr": 74.6875,
195
+ "rec": 67.96610169491527,
196
+ "know_rec": 64.61538461538461,
197
+ "gen_know_rec": 57.199999999999974,
198
+ "gen_know_rec_ocr": 76.92307692307693,
199
+ "spat_gen_rec_ocr": 77.20930232558139,
200
+ "spat_gen_ocr": 100.0,
201
+ "math_spat_seq_gen_ocr": 90.0,
202
+ "math_spat_rec_seq_ocr": 100.0,
203
+ "gen_rec_spat": 47.27272727272727,
204
+ "spat_gen_ocr_math": 30.0,
205
+ "seq_rec_spat": 61.66666666666666,
206
+ "spat_seq_rec_ocr": 16.666666666666664,
207
+ "gen_know_rec_spat": 73.33333333333334,
208
+ "gen_rec": 65.88235294117648,
209
+ "know_rec_spat_ocr": 37.5,
210
+ "spat_rec_know_gen_ocr": 65.0,
211
+ "gen_rec_know": 57.199999999999974,
212
+ "math_rec_ocr": 100.0,
213
+ "gen_rec_ocr": 88.00000000000001,
214
+ "gen_rec_ocr_seq": 84.28571428571429,
215
+ "gen_ocr": 78.46153846153847,
216
+ "gen_rec_seq": 68.57142857142857,
217
+ "seq_rec": 92.0,
218
+ "gen_rec_spat_seq": 78.75,
219
+ "know_rec_seq": 0.0,
220
+ "gen_seq_rec": 68.57142857142857,
221
+ "gen_know_rec_seq": 70.0,
222
+ "spat_rec_seq_gen_ocr": 20.0,
223
+ "seq_rec_know_gen_ocr": 90.0,
224
+ "math_know_rec": 0.0,
225
+ "seq_rec_ocr": 100.0,
226
+ "spat_know_rec_ocr": 37.5
227
+ },
228
+ "acc_stderr": 0,
229
+ "acc": 67.4951
230
+ },
231
+ "MathVerse": {
232
+ "Vision Intensive": {
233
+ "accuracy": 55.58,
234
+ "correct": 438,
235
+ "total": 788
236
+ },
237
+ "Total": {
238
+ "accuracy": 55.3,
239
+ "correct": 2179,
240
+ "total": 3940
241
+ },
242
+ "Vision Dominant": {
243
+ "accuracy": 50.38,
244
+ "correct": 397,
245
+ "total": 788
246
+ },
247
+ "Text Lite": {
248
+ "accuracy": 58.5,
249
+ "correct": 461,
250
+ "total": 788
251
+ },
252
+ "Text Dominant": {
253
+ "accuracy": 62.44,
254
+ "correct": 492,
255
+ "total": 788
256
+ },
257
+ "Vision Only": {
258
+ "accuracy": 49.62,
259
+ "correct": 391,
260
+ "total": 788
261
+ },
262
+ "accuracy": 55.3,
263
+ "acc_stderr": 0,
264
+ "acc": 55.3
265
+ },
266
+ "Ocrlite": {
267
+ "reject_info": {
268
+ "reject_rate": 0.43,
269
+ "reject_number": 7,
270
+ "total_question": 1644
271
+ },
272
+ "final_score": [
273
+ 1327,
274
+ 1637
275
+ ],
276
+ "accuracy": 81.063,
277
+ "Key Information Extraction-Bookshelf": [
278
+ 38,
279
+ 51,
280
+ 0.745,
281
+ {
282
+ "Default": [
283
+ 38,
284
+ 51,
285
+ 0.745
286
+ ]
287
+ }
288
+ ],
289
+ "Scene Text-centric VQA-diet_constraints": [
290
+ 77,
291
+ 90,
292
+ 0.856,
293
+ {
294
+ "Default": [
295
+ 77,
296
+ 90,
297
+ 0.856
298
+ ]
299
+ }
300
+ ],
301
+ "Doc-oriented VQA-Control": [
302
+ 157,
303
+ 189,
304
+ 0.831,
305
+ {
306
+ "Default": [
307
+ 157,
308
+ 189,
309
+ 0.831
310
+ ]
311
+ }
312
+ ],
313
+ "Doc-oriented VQA": [
314
+ 182,
315
+ 204,
316
+ 0.892,
317
+ {
318
+ "Default": [
319
+ 182,
320
+ 204,
321
+ 0.892
322
+ ]
323
+ }
324
+ ],
325
+ "Scene Text-centric VQA-Fake_logo": [
326
+ 76,
327
+ 119,
328
+ 0.639,
329
+ {
330
+ "Default": [
331
+ 76,
332
+ 119,
333
+ 0.639
334
+ ]
335
+ }
336
+ ],
337
+ "Handwritten Mathematical Expression Recognition": [
338
+ 4,
339
+ 100,
340
+ 0.04,
341
+ {
342
+ "Default": [
343
+ 4,
344
+ 100,
345
+ 0.04
346
+ ]
347
+ }
348
+ ],
349
+ "Key Information Extraction": [
350
+ 198,
351
+ 209,
352
+ 0.947,
353
+ {
354
+ "Default": [
355
+ 198,
356
+ 209,
357
+ 0.947
358
+ ]
359
+ }
360
+ ],
361
+ "Scene Text-centric VQA-Control": [
362
+ 168,
363
+ 193,
364
+ 0.87,
365
+ {
366
+ "Default": [
367
+ 168,
368
+ 193,
369
+ 0.87
370
+ ]
371
+ }
372
+ ],
373
+ "Scene Text-centric VQA": [
374
+ 250,
375
+ 282,
376
+ 0.887,
377
+ {
378
+ "Default": [
379
+ 250,
380
+ 282,
381
+ 0.887
382
+ ]
383
+ }
384
+ ],
385
+ "Artistic Text Recognition": [
386
+ 42,
387
+ 50,
388
+ 0.84,
389
+ {
390
+ "Default": [
391
+ 42,
392
+ 50,
393
+ 0.84
394
+ ]
395
+ }
396
+ ],
397
+ "Irregular Text Recognition": [
398
+ 41,
399
+ 50,
400
+ 0.82,
401
+ {
402
+ "Default": [
403
+ 41,
404
+ 50,
405
+ 0.82
406
+ ]
407
+ }
408
+ ],
409
+ "Non-Semantic Text Recognition": [
410
+ 45,
411
+ 50,
412
+ 0.9,
413
+ {
414
+ "Default": [
415
+ 45,
416
+ 50,
417
+ 0.9
418
+ ]
419
+ }
420
+ ],
421
+ "Regular Text Recognition": [
422
+ 49,
423
+ 50,
424
+ 0.98,
425
+ {
426
+ "Default": [
427
+ 49,
428
+ 50,
429
+ 0.98
430
+ ]
431
+ }
432
+ ],
433
+ "acc_stderr": 0,
434
+ "acc": 81.063
435
+ },
436
+ "OcrliteZh": {
437
+ "final_score": [
438
+ 133,
439
+ 234
440
+ ],
441
+ "accuracy": 56.838,
442
+ "Docvqa": [
443
+ 7,
444
+ 10,
445
+ 0.7,
446
+ {
447
+ "Default": [
448
+ 7,
449
+ 10,
450
+ 0.7
451
+ ]
452
+ }
453
+ ],
454
+ "Chartqa-human": [
455
+ 8,
456
+ 10,
457
+ 0.8,
458
+ {
459
+ "Default": [
460
+ 8,
461
+ 10,
462
+ 0.8
463
+ ]
464
+ }
465
+ ],
466
+ "Chartqa-au": [
467
+ 8,
468
+ 10,
469
+ 0.8,
470
+ {
471
+ "Default": [
472
+ 8,
473
+ 10,
474
+ 0.8
475
+ ]
476
+ }
477
+ ],
478
+ "infographic": [
479
+ 9,
480
+ 10,
481
+ 0.9,
482
+ {
483
+ "Default": [
484
+ 9,
485
+ 10,
486
+ 0.9
487
+ ]
488
+ }
489
+ ],
490
+ "Key Information Extraction": [
491
+ 37,
492
+ 45,
493
+ 0.822,
494
+ {
495
+ "Default": [
496
+ 37,
497
+ 45,
498
+ 0.822
499
+ ]
500
+ }
501
+ ],
502
+ "Scene Text-centric VQA": [
503
+ 24,
504
+ 40,
505
+ 0.6,
506
+ {
507
+ "Default": [
508
+ 24,
509
+ 40,
510
+ 0.6
511
+ ]
512
+ }
513
+ ],
514
+ "Artistic Text Recognition": [
515
+ 1,
516
+ 11,
517
+ 0.091,
518
+ {
519
+ "Default": [
520
+ 1,
521
+ 11,
522
+ 0.091
523
+ ]
524
+ }
525
+ ],
526
+ "IrRegular Text Recognition": [
527
+ 1,
528
+ 11,
529
+ 0.091,
530
+ {
531
+ "Default": [
532
+ 1,
533
+ 11,
534
+ 0.091
535
+ ]
536
+ }
537
+ ],
538
+ "Non-semantic Text Recognition": [
539
+ 6,
540
+ 12,
541
+ 0.5,
542
+ {
543
+ "Default": [
544
+ 6,
545
+ 12,
546
+ 0.5
547
+ ]
548
+ }
549
+ ],
550
+ "Regular Text Recognition": [
551
+ 5,
552
+ 11,
553
+ 0.455,
554
+ {
555
+ "Default": [
556
+ 5,
557
+ 11,
558
+ 0.455
559
+ ]
560
+ }
561
+ ],
562
+ "Handwriting_CN": [
563
+ 6,
564
+ 20,
565
+ 0.3,
566
+ {
567
+ "Default": [
568
+ 6,
569
+ 20,
570
+ 0.3
571
+ ]
572
+ }
573
+ ],
574
+ "Chinese Unlimited": [
575
+ 21,
576
+ 44,
577
+ 0.477,
578
+ {
579
+ "Default": [
580
+ 21,
581
+ 44,
582
+ 0.477
583
+ ]
584
+ }
585
+ ],
586
+ "acc_stderr": 0,
587
+ "acc": 56.838
588
+ },
589
+ "CharXiv": {
590
+ "descriptive": {
591
+ "Overall Score": 86.1,
592
+ "By Question": {
593
+ "Q1": 88.52,
594
+ "Q2": 84.35,
595
+ "Q3": 77.68,
596
+ "Q4": 91.83,
597
+ "Q5": 89.12,
598
+ "Q6": 88.76,
599
+ "Q7": 87.18,
600
+ "Q8": 94.64,
601
+ "Q9": 89.05,
602
+ "Q10": 86.99,
603
+ "Q11": 62.29,
604
+ "Q12": 87.36,
605
+ "Q13": 78.54,
606
+ "Q14": 92.2,
607
+ "Q15": 95.21,
608
+ "Q16": 77.78,
609
+ "Q17": 63.84,
610
+ "Q18": 94.33,
611
+ "Q19": 90.77
612
+ },
613
+ "By Category": {
614
+ "Information Extraction": 86.89,
615
+ "Enumeration": 90.48,
616
+ "Pattern Recognition": 80.79,
617
+ "Counting": 87.79,
618
+ "Compositionality": 63.84
619
+ },
620
+ "By Subplot": {
621
+ "1 Subplot": 89.57,
622
+ "2-4 Subplots": 85.91,
623
+ "5+ Subplots": 80.72
624
+ },
625
+ "By Subject": {
626
+ "Computer Science": 87.1,
627
+ "Economics": 87.14,
628
+ "Electrical Engineering and Systems Science": 90.55,
629
+ "Mathematics": 87.04,
630
+ "Physics": 82.48,
631
+ "Quantitative Biology": 80.75,
632
+ "Quantitative Finance": 86.21,
633
+ "Statistics": 87.83
634
+ },
635
+ "By Year": {
636
+ "2020": 86.34,
637
+ "2021": 84.29,
638
+ "2022": 86.07,
639
+ "2023": 87.8
640
+ },
641
+ "N_valid": 4000,
642
+ "N_invalid": 0,
643
+ "Question Type": "Descriptive"
644
+ },
645
+ "reasoning": {
646
+ "Overall Score": 50.2,
647
+ "By Answer Type": {
648
+ "Text-in-Chart": 53.64,
649
+ "Text-in-General": 53.54,
650
+ "Number-in-Chart": 45.69,
651
+ "Number-in-General": 46.72
652
+ },
653
+ "By Source": {
654
+ "GPT-Sourced": 52.17,
655
+ "GPT-Inspired": 50.0,
656
+ "Completely Human": 49.67
657
+ },
658
+ "By Subject": {
659
+ "Computer Science": 50.0,
660
+ "Economics": 47.83,
661
+ "Electrical Engineering and Systems Science": 44.54,
662
+ "Mathematics": 52.59,
663
+ "Physics": 53.54,
664
+ "Quantitative Biology": 47.62,
665
+ "Quantitative Finance": 48.28,
666
+ "Statistics": 57.52
667
+ },
668
+ "By Year": {
669
+ "2020": 48.99,
670
+ "2021": 55.17,
671
+ "2022": 46.31,
672
+ "2023": 50.0
673
+ },
674
+ "By Subplot": {
675
+ "1 Subplot": 52.59,
676
+ "2-4 Subplots": 47.88,
677
+ "5+ Subplots": 50.0
678
+ },
679
+ "N_valid": 1000,
680
+ "N_invalid": 0,
681
+ "Question Type": "Reasoning"
682
+ },
683
+ "accuracy": 68.15,
684
+ "acc_stderr": 0,
685
+ "acc": 68.15
686
+ },
687
+ "MathVision": {
688
+ "accuracy": 40.1,
689
+ "acc_stderr": 0,
690
+ "acc": 40.1
691
+ },
692
+ "CII-Bench": {
693
+ "accuracy": 55.42,
694
+ "domain_score": {
695
+ "Art": 56.62,
696
+ "CTC": 45.19,
697
+ "Life": 58.44,
698
+ "Society": 55.14,
699
+ "Env.": 62.96,
700
+ "Politics": 62.5
701
+ },
702
+ "emotion_score": {
703
+ "Negative": 57.74,
704
+ "Positive": 52.56,
705
+ "Neutral": 55.64
706
+ },
707
+ "acc_stderr": 0,
708
+ "acc": 55.42
709
+ },
710
+ "Blink": {
711
+ "accuracy": 64.91,
712
+ "Art Style": 82.05,
713
+ "Counting": 70.0,
714
+ "Forensic Detection": 58.33,
715
+ "Functional Correspondence": 58.46,
716
+ "IQ Test": 30.0,
717
+ "Jigsaw": 69.33,
718
+ "Multi-view Reasoning": 56.39,
719
+ "Object Localization": 52.46,
720
+ "Relative Depth": 83.06,
721
+ "Relative Reflectance": 36.57,
722
+ "Semantic Correspondence": 58.99,
723
+ "Spatial Relation": 80.42,
724
+ "Visual Correspondence": 86.05,
725
+ "Visual Similarity": 85.93,
726
+ "acc_stderr": 0,
727
+ "acc": 64.91
728
+ }
729
+ }
730
+ }
MiniCPM-V-2.6/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 28.38
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 23.01,
 
 
25
  "acc_stderr": 0,
26
- "acc": 23.01
27
  },
28
  "MmvetV2": {
29
  "accuracy": 54.7582,
 
21
  "acc": 28.38
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 18.38,
25
+ "subject_score": {
26
+ "Art": 16.98,
27
+ "Design": 33.33,
28
+ "Sociology": 25.93,
29
+ "Finance": 10.0,
30
+ "Energy_and_Power": 18.97,
31
+ "Literature": 53.85,
32
+ "Agriculture": 20.0,
33
+ "History": 19.64,
34
+ "Clinical_Medicine": 8.47,
35
+ "Pharmacy": 26.32,
36
+ "Public_Health": 10.34,
37
+ "Accounting": 10.34,
38
+ "Art_Theory": 21.82,
39
+ "Psychology": 16.67,
40
+ "Physics": 5.0,
41
+ "Architecture_and_Engineering": 11.67,
42
+ "Biology": 20.34,
43
+ "Economics": 18.64,
44
+ "Manage": 22.0,
45
+ "Diagnostics_and_Laboratory_Medicine": 6.67,
46
+ "Electronics": 13.33,
47
+ "Basic_Medical_Science": 25.0,
48
+ "Mechanical_Engineering": 13.56,
49
+ "Computer_Science": 23.33,
50
+ "Math": 13.33,
51
+ "Materials": 11.67,
52
+ "Music": 21.67,
53
+ "Chemistry": 23.33,
54
+ "Marketing": 11.86,
55
+ "Geography": 25.0
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 18.38
59
  },
60
  "MmvetV2": {
61
  "accuracy": 54.7582,
Mistral-Small-3.1-24B-Instruct-2503/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,723 @@
 
 
1
+ {
2
+ "config_general": {
3
+ "model_name": "Mistral-Small-3.1-24B-Instruct-2503",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 54,
12
+ "accuracy": 61.36
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 391,
17
+ "accuracy": 43.44
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 40,
22
+ "accuracy": 31.75
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 77,
27
+ "accuracy": 37.75
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 71,
32
+ "accuracy": 46.41
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 44,
37
+ "accuracy": 51.76
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 105,
42
+ "accuracy": 43.03
43
+ },
44
+ "accuracy": 43.44,
45
+ "acc_stderr": 0,
46
+ "acc": 43.44
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 53.78,
50
+ "subject_score": {
51
+ "Accounting": 46.67,
52
+ "Agriculture": 56.67,
53
+ "Architecture": 40.0,
54
+ "Art": 73.33,
55
+ "Basic": 70.0,
56
+ "Biology": 46.67,
57
+ "Chemistry": 30.0,
58
+ "Clinical": 73.33,
59
+ "Computer": 43.33,
60
+ "Design": 90.0,
61
+ "Diagnostics": 43.33,
62
+ "Economics": 56.67,
63
+ "Electronics": 30.0,
64
+ "Energy": 40.0,
65
+ "Finance": 33.33,
66
+ "Geography": 63.33,
67
+ "History": 66.67,
68
+ "Literature": 86.67,
69
+ "Manage": 56.67,
70
+ "Marketing": 66.67,
71
+ "Materials": 33.33,
72
+ "Math": 33.33,
73
+ "Mechanical": 36.67,
74
+ "Music": 30.0,
75
+ "Pharmacy": 56.67,
76
+ "Physics": 40.0,
77
+ "Psychology": 60.0,
78
+ "Public": 73.33,
79
+ "Sociology": 63.33
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 50.71,
83
+ "Easy": 66.44,
84
+ "Hard": 40.33
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 53.78
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 38.96,
91
+ "subject_score": {
92
+ "History": 46.43,
93
+ "Art": 60.38,
94
+ "Design": 58.33,
95
+ "Literature": 69.23,
96
+ "Agriculture": 26.67,
97
+ "Finance": 28.33,
98
+ "Sociology": 59.26,
99
+ "Accounting": 27.59,
100
+ "Energy_and_Power": 25.86,
101
+ "Pharmacy": 52.63,
102
+ "Architecture_and_Engineering": 30.0,
103
+ "Clinical_Medicine": 40.68,
104
+ "Public_Health": 39.66,
105
+ "Physics": 21.67,
106
+ "Art_Theory": 54.55,
107
+ "Electronics": 45.0,
108
+ "Psychology": 35.0,
109
+ "Biology": 40.68,
110
+ "Manage": 36.0,
111
+ "Economics": 49.15,
112
+ "Mechanical_Engineering": 33.9,
113
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
114
+ "Basic_Medical_Science": 40.38,
115
+ "Computer_Science": 46.67,
116
+ "Math": 23.33,
117
+ "Music": 26.67,
118
+ "Materials": 25.0,
119
+ "Marketing": 32.2,
120
+ "Chemistry": 31.67,
121
+ "Geography": 44.23
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 36.33,
125
+ "Easy": 49.05,
126
+ "Hard": 30.92
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 38.96
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 40.17,
133
+ "subject_score": {
134
+ "History": 50.0,
135
+ "Art": 49.06,
136
+ "Design": 51.67,
137
+ "Literature": 67.31,
138
+ "Agriculture": 35.0,
139
+ "Finance": 56.67,
140
+ "Sociology": 44.44,
141
+ "Accounting": 50.0,
142
+ "Energy_and_Power": 27.59,
143
+ "Pharmacy": 54.39,
144
+ "Architecture_and_Engineering": 25.0,
145
+ "Clinical_Medicine": 35.59,
146
+ "Public_Health": 56.9,
147
+ "Physics": 25.0,
148
+ "Art_Theory": 54.55,
149
+ "Electronics": 31.67,
150
+ "Psychology": 36.67,
151
+ "Biology": 35.59,
152
+ "Manage": 34.0,
153
+ "Economics": 54.24,
154
+ "Mechanical_Engineering": 30.51,
155
+ "Diagnostics_and_Laboratory_Medicine": 23.33,
156
+ "Basic_Medical_Science": 38.46,
157
+ "Computer_Science": 36.67,
158
+ "Math": 31.67,
159
+ "Music": 31.67,
160
+ "Materials": 25.0,
161
+ "Marketing": 47.46,
162
+ "Chemistry": 38.33,
163
+ "Geography": 32.69
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 40.17
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 63.4623,
170
+ "capability_scores": {
171
+ "ocr": 72.54807692307695,
172
+ "math": 80.0,
173
+ "spat": 61.37055837563451,
174
+ "rec": 58.93203883495154,
175
+ "know": 56.66666666666664,
176
+ "gen": 62.1818181818182,
177
+ "seq": 59.285714285714306
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 81.81818181818183,
181
+ "math_ocr_spat": 86.0,
182
+ "math_ocr_rec_spat": 40.0,
183
+ "rec_spat": 51.42857142857144,
184
+ "ocr_spat": 83.07692307692308,
185
+ "ocr_rec_spat": 37.5,
186
+ "know_ocr_spat": 100.0,
187
+ "ocr_rec": 62.5,
188
+ "know_rec_spat": 25.0,
189
+ "ocr": 85.3125,
190
+ "rec": 60.67796610169491,
191
+ "know_rec": 60.76923076923077,
192
+ "know_gen_rec": 56.09999999999997,
193
+ "know_gen_ocr_rec": 69.23076923076923,
194
+ "gen_ocr_rec_spat": 71.16279069767441,
195
+ "gen_ocr_spat": 75.0,
196
+ "ocr_spat_seq_math_gen": 100.0,
197
+ "ocr_spat_seq_rec_math": 50.0,
198
+ "gen_rec_spat": 58.63636363636364,
199
+ "math_gen_ocr_spat": 0.0,
200
+ "seq_rec_spat": 40.0,
201
+ "seq_ocr_rec_spat": 50.0,
202
+ "know_gen_rec_spat": 36.66666666666667,
203
+ "gen_rec": 66.17647058823529,
204
+ "know_ocr_rec_spat": 20.0,
205
+ "ocr_know_spat_rec_gen": 80.0,
206
+ "ocr_rec_math": 100.0,
207
+ "gen_ocr_rec": 92.0,
208
+ "seq_gen_ocr_rec": 62.85714285714287,
209
+ "gen_ocr": 61.53846153846154,
210
+ "seq_gen_rec": 67.14285714285714,
211
+ "seq_rec": 54.99999999999999,
212
+ "seq_gen_rec_spat": 61.24999999999999,
213
+ "know_seq_rec": 100.0,
214
+ "gen_seq_rec": 67.14285714285714,
215
+ "seq_know_gen_rec": 30.0,
216
+ "ocr_spat_seq_rec_gen": 33.33333333333333,
217
+ "ocr_know_seq_rec_gen": 90.0,
218
+ "know_rec_math": 100.0,
219
+ "seq_ocr_rec": 100.0
220
+ },
221
+ "acc_stderr": 0,
222
+ "acc": 63.4623
223
+ },
224
+ "MathVerse": {
225
+ "reject_info": {
226
+ "reject_rate": 0.3,
227
+ "reject_number": 12,
228
+ "total_question": 3940
229
+ },
230
+ "Vision Intensive": {
231
+ "accuracy": 27.01,
232
+ "correct": 212,
233
+ "total": 785
234
+ },
235
+ "Total": {
236
+ "accuracy": 28.51,
237
+ "correct": 1120,
238
+ "total": 3928
239
+ },
240
+ "Text Dominant": {
241
+ "accuracy": 34.95,
242
+ "correct": 274,
243
+ "total": 784
244
+ },
245
+ "Vision Dominant": {
246
+ "accuracy": 27.13,
247
+ "correct": 213,
248
+ "total": 785
249
+ },
250
+ "Vision Only": {
251
+ "accuracy": 24.62,
252
+ "correct": 194,
253
+ "total": 788
254
+ },
255
+ "Text Lite": {
256
+ "accuracy": 28.88,
257
+ "correct": 227,
258
+ "total": 786
259
+ },
260
+ "accuracy": 28.51,
261
+ "acc_stderr": 0,
262
+ "acc": 28.51
263
+ },
264
+ "Ocrlite": {
265
+ "final_score": [
266
+ 1253,
267
+ 1644
268
+ ],
269
+ "accuracy": 76.217,
270
+ "Key Information Extraction-Bookshelf": [
271
+ 32,
272
+ 51,
273
+ 0.627,
274
+ {
275
+ "Default": [
276
+ 32,
277
+ 51,
278
+ 0.627
279
+ ]
280
+ }
281
+ ],
282
+ "Scene Text-centric VQA-diet_constraints": [
283
+ 60,
284
+ 90,
285
+ 0.667,
286
+ {
287
+ "Default": [
288
+ 60,
289
+ 90,
290
+ 0.667
291
+ ]
292
+ }
293
+ ],
294
+ "Doc-oriented VQA-Control": [
295
+ 147,
296
+ 189,
297
+ 0.778,
298
+ {
299
+ "Default": [
300
+ 147,
301
+ 189,
302
+ 0.778
303
+ ]
304
+ }
305
+ ],
306
+ "Doc-oriented VQA": [
307
+ 178,
308
+ 204,
309
+ 0.873,
310
+ {
311
+ "Default": [
312
+ 178,
313
+ 204,
314
+ 0.873
315
+ ]
316
+ }
317
+ ],
318
+ "Scene Text-centric VQA-Fake_logo": [
319
+ 72,
320
+ 119,
321
+ 0.605,
322
+ {
323
+ "Default": [
324
+ 72,
325
+ 119,
326
+ 0.605
327
+ ]
328
+ }
329
+ ],
330
+ "Handwritten Mathematical Expression Recognition": [
331
+ 1,
332
+ 100,
333
+ 0.01,
334
+ {
335
+ "Default": [
336
+ 1,
337
+ 100,
338
+ 0.01
339
+ ]
340
+ }
341
+ ],
342
+ "Key Information Extraction": [
343
+ 187,
344
+ 209,
345
+ 0.895,
346
+ {
347
+ "Default": [
348
+ 187,
349
+ 209,
350
+ 0.895
351
+ ]
352
+ }
353
+ ],
354
+ "Scene Text-centric VQA-Control": [
355
+ 171,
356
+ 200,
357
+ 0.855,
358
+ {
359
+ "Default": [
360
+ 171,
361
+ 200,
362
+ 0.855
363
+ ]
364
+ }
365
+ ],
366
+ "Scene Text-centric VQA": [
367
+ 242,
368
+ 282,
369
+ 0.858,
370
+ {
371
+ "Default": [
372
+ 242,
373
+ 282,
374
+ 0.858
375
+ ]
376
+ }
377
+ ],
378
+ "Artistic Text Recognition": [
379
+ 36,
380
+ 50,
381
+ 0.72,
382
+ {
383
+ "Default": [
384
+ 36,
385
+ 50,
386
+ 0.72
387
+ ]
388
+ }
389
+ ],
390
+ "Irregular Text Recognition": [
391
+ 41,
392
+ 50,
393
+ 0.82,
394
+ {
395
+ "Default": [
396
+ 41,
397
+ 50,
398
+ 0.82
399
+ ]
400
+ }
401
+ ],
402
+ "Non-Semantic Text Recognition": [
403
+ 39,
404
+ 50,
405
+ 0.78,
406
+ {
407
+ "Default": [
408
+ 39,
409
+ 50,
410
+ 0.78
411
+ ]
412
+ }
413
+ ],
414
+ "Regular Text Recognition": [
415
+ 47,
416
+ 50,
417
+ 0.94,
418
+ {
419
+ "Default": [
420
+ 47,
421
+ 50,
422
+ 0.94
423
+ ]
424
+ }
425
+ ],
426
+ "acc_stderr": 0,
427
+ "acc": 76.217
428
+ },
429
+ "OcrliteZh": {
430
+ "final_score": [
431
+ 113,
432
+ 234
433
+ ],
434
+ "accuracy": 48.291,
435
+ "Docvqa": [
436
+ 6,
437
+ 10,
438
+ 0.6,
439
+ {
440
+ "Default": [
441
+ 6,
442
+ 10,
443
+ 0.6
444
+ ]
445
+ }
446
+ ],
447
+ "Chartqa-human": [
448
+ 4,
449
+ 10,
450
+ 0.4,
451
+ {
452
+ "Default": [
453
+ 4,
454
+ 10,
455
+ 0.4
456
+ ]
457
+ }
458
+ ],
459
+ "Chartqa-au": [
460
+ 8,
461
+ 10,
462
+ 0.8,
463
+ {
464
+ "Default": [
465
+ 8,
466
+ 10,
467
+ 0.8
468
+ ]
469
+ }
470
+ ],
471
+ "infographic": [
472
+ 6,
473
+ 10,
474
+ 0.6,
475
+ {
476
+ "Default": [
477
+ 6,
478
+ 10,
479
+ 0.6
480
+ ]
481
+ }
482
+ ],
483
+ "Key Information Extraction": [
484
+ 30,
485
+ 45,
486
+ 0.667,
487
+ {
488
+ "Default": [
489
+ 30,
490
+ 45,
491
+ 0.667
492
+ ]
493
+ }
494
+ ],
495
+ "Scene Text-centric VQA": [
496
+ 23,
497
+ 40,
498
+ 0.575,
499
+ {
500
+ "Default": [
501
+ 23,
502
+ 40,
503
+ 0.575
504
+ ]
505
+ }
506
+ ],
507
+ "Artistic Text Recognition": [
508
+ 2,
509
+ 11,
510
+ 0.182,
511
+ {
512
+ "Default": [
513
+ 2,
514
+ 11,
515
+ 0.182
516
+ ]
517
+ }
518
+ ],
519
+ "IrRegular Text Recognition": [
520
+ 4,
521
+ 11,
522
+ 0.364,
523
+ {
524
+ "Default": [
525
+ 4,
526
+ 11,
527
+ 0.364
528
+ ]
529
+ }
530
+ ],
531
+ "Non-semantic Text Recognition": [
532
+ 2,
533
+ 12,
534
+ 0.167,
535
+ {
536
+ "Default": [
537
+ 2,
538
+ 12,
539
+ 0.167
540
+ ]
541
+ }
542
+ ],
543
+ "Regular Text Recognition": [
544
+ 6,
545
+ 11,
546
+ 0.545,
547
+ {
548
+ "Default": [
549
+ 6,
550
+ 11,
551
+ 0.545
552
+ ]
553
+ }
554
+ ],
555
+ "Handwriting_CN": [
556
+ 6,
557
+ 20,
558
+ 0.3,
559
+ {
560
+ "Default": [
561
+ 6,
562
+ 20,
563
+ 0.3
564
+ ]
565
+ }
566
+ ],
567
+ "Chinese Unlimited": [
568
+ 16,
569
+ 44,
570
+ 0.364,
571
+ {
572
+ "Default": [
573
+ 16,
574
+ 44,
575
+ 0.364
576
+ ]
577
+ }
578
+ ],
579
+ "acc_stderr": 0,
580
+ "acc": 48.291
581
+ },
582
+ "CharXiv": {
583
+ "descriptive": {
584
+ "Overall Score": 81.97,
585
+ "By Question": {
586
+ "Q1": 83.61,
587
+ "Q2": 87.83,
588
+ "Q3": 78.54,
589
+ "Q4": 88.33,
590
+ "Q5": 90.79,
591
+ "Q6": 80.72,
592
+ "Q7": 84.19,
593
+ "Q8": 87.95,
594
+ "Q9": 84.58,
595
+ "Q10": 75.34,
596
+ "Q11": 57.71,
597
+ "Q12": 74.73,
598
+ "Q13": 70.32,
599
+ "Q14": 84.75,
600
+ "Q15": 90.73,
601
+ "Q16": 75.0,
602
+ "Q17": 63.84,
603
+ "Q18": 91.5,
604
+ "Q19": 93.85
605
+ },
606
+ "By Category": {
607
+ "Information Extraction": 84.88,
608
+ "Enumeration": 84.26,
609
+ "Pattern Recognition": 77.29,
610
+ "Counting": 78.12,
611
+ "Compositionality": 63.84
612
+ },
613
+ "By Subplot": {
614
+ "1 Subplot": 86.01,
615
+ "2-4 Subplots": 83.53,
616
+ "5+ Subplots": 72.88
617
+ },
618
+ "By Subject": {
619
+ "Computer Science": 81.55,
620
+ "Economics": 82.07,
621
+ "Electrical Engineering and Systems Science": 88.03,
622
+ "Mathematics": 82.22,
623
+ "Physics": 78.15,
624
+ "Quantitative Biology": 76.98,
625
+ "Quantitative Finance": 80.82,
626
+ "Statistics": 86.73
627
+ },
628
+ "By Year": {
629
+ "2020": 81.07,
630
+ "2021": 81.13,
631
+ "2022": 84.22,
632
+ "2023": 81.55
633
+ },
634
+ "N_valid": 4000,
635
+ "N_invalid": 0,
636
+ "Question Type": "Descriptive"
637
+ },
638
+ "reasoning": {
639
+ "Overall Score": 47.9,
640
+ "By Answer Type": {
641
+ "Text-in-Chart": 54.77,
642
+ "Text-in-General": 54.55,
643
+ "Number-in-Chart": 43.1,
644
+ "Number-in-General": 36.68
645
+ },
646
+ "By Source": {
647
+ "GPT-Sourced": 57.61,
648
+ "GPT-Inspired": 47.22,
649
+ "Completely Human": 45.17
650
+ },
651
+ "By Subject": {
652
+ "Computer Science": 48.41,
653
+ "Economics": 48.55,
654
+ "Electrical Engineering and Systems Science": 48.74,
655
+ "Mathematics": 52.59,
656
+ "Physics": 51.97,
657
+ "Quantitative Biology": 43.65,
658
+ "Quantitative Finance": 44.83,
659
+ "Statistics": 43.36
660
+ },
661
+ "By Year": {
662
+ "2020": 44.13,
663
+ "2021": 47.89,
664
+ "2022": 44.26,
665
+ "2023": 55.24
666
+ },
667
+ "By Subplot": {
668
+ "1 Subplot": 49.22,
669
+ "2-4 Subplots": 44.71,
670
+ "5+ Subplots": 50.85
671
+ },
672
+ "N_valid": 1000,
673
+ "N_invalid": 0,
674
+ "Question Type": "Reasoning"
675
+ },
676
+ "accuracy": 64.94,
677
+ "acc_stderr": 0,
678
+ "acc": 64.94
679
+ },
680
+ "MathVision": {
681
+ "accuracy": 33.75,
682
+ "acc_stderr": 0,
683
+ "acc": 33.75
684
+ },
685
+ "CII-Bench": {
686
+ "accuracy": 52.42,
687
+ "domain_score": {
688
+ "Life": 49.35,
689
+ "Art": 50.0,
690
+ "CTC": 55.56,
691
+ "Society": 56.76,
692
+ "Env.": 48.15,
693
+ "Politics": 54.17
694
+ },
695
+ "emotion_score": {
696
+ "Neutral": 55.64,
697
+ "Negative": 52.45,
698
+ "Positive": 48.72
699
+ },
700
+ "acc_stderr": 0,
701
+ "acc": 52.42
702
+ },
703
+ "Blink": {
704
+ "accuracy": 55.23,
705
+ "Art Style": 58.97,
706
+ "Counting": 70.83,
707
+ "Forensic Detection": 53.03,
708
+ "Functional Correspondence": 31.54,
709
+ "IQ Test": 24.67,
710
+ "Jigsaw": 63.33,
711
+ "Multi-view Reasoning": 56.39,
712
+ "Object Localization": 50.82,
713
+ "Relative Depth": 71.77,
714
+ "Relative Reflectance": 27.61,
715
+ "Semantic Correspondence": 44.6,
716
+ "Spatial Relation": 76.92,
717
+ "Visual Correspondence": 68.02,
718
+ "Visual Similarity": 74.81,
719
+ "acc_stderr": 0,
720
+ "acc": 55.23
721
+ }
722
+ }
723
+ }
Molmo-72B-0924/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 36.65
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 60.69,
 
 
25
  "acc_stderr": 0,
26
- "acc": 60.69
27
  },
28
  "MmvetV2": {
29
  "accuracy": 59.2456,
@@ -332,9 +364,9 @@
332
  "acc": 54.27
333
  },
334
  "MathVision": {
335
- "accuracy": 24.38,
336
  "acc_stderr": 0,
337
- "acc": 24.38
338
  },
339
  "CII-Bench": {
340
  "accuracy": 52.55,
 
21
  "acc": 36.65
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 24.28,
25
+ "subject_score": {
26
+ "History": 30.36,
27
+ "Art": 35.85,
28
+ "Design": 36.67,
29
+ "Literature": 59.62,
30
+ "Agriculture": 6.67,
31
+ "Finance": 28.33,
32
+ "Sociology": 27.78,
33
+ "Accounting": 25.86,
34
+ "Energy_and_Power": 13.79,
35
+ "Pharmacy": 33.33,
36
+ "Architecture_and_Engineering": 13.33,
37
+ "Clinical_Medicine": 10.17,
38
+ "Public_Health": 18.97,
39
+ "Physics": 21.67,
40
+ "Art_Theory": 32.73,
41
+ "Electronics": 16.67,
42
+ "Psychology": 28.33,
43
+ "Biology": 23.73,
44
+ "Manage": 26.0,
45
+ "Economics": 40.68,
46
+ "Mechanical_Engineering": 28.81,
47
+ "Diagnostics_and_Laboratory_Medicine": 16.67,
48
+ "Basic_Medical_Science": 30.77,
49
+ "Computer_Science": 25.0,
50
+ "Math": 16.67,
51
+ "Music": 11.67,
52
+ "Materials": 11.67,
53
+ "Marketing": 20.34,
54
+ "Chemistry": 15.0,
55
+ "Geography": 30.77
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 24.28
59
  },
60
  "MmvetV2": {
61
  "accuracy": 59.2456,
 
364
  "acc": 54.27
365
  },
366
  "MathVision": {
367
+ "accuracy": 24.24,
368
  "acc_stderr": 0,
369
+ "acc": 24.24
370
  },
371
  "CII-Bench": {
372
  "accuracy": 52.55,
Molmo-7B-D/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 26.07
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 54.97,
 
 
25
  "acc_stderr": 0,
26
- "acc": 54.97
27
  },
28
  "MmvetV2": {
29
  "accuracy": 52.6886,
 
21
  "acc": 26.07
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 16.07,
25
+ "subject_score": {
26
+ "History": 26.79,
27
+ "Art": 20.75,
28
+ "Design": 23.33,
29
+ "Literature": 48.08,
30
+ "Agriculture": 10.0,
31
+ "Finance": 5.0,
32
+ "Sociology": 20.37,
33
+ "Accounting": 12.07,
34
+ "Energy_and_Power": 10.34,
35
+ "Pharmacy": 26.32,
36
+ "Architecture_and_Engineering": 15.0,
37
+ "Clinical_Medicine": 11.86,
38
+ "Public_Health": 6.9,
39
+ "Physics": 11.67,
40
+ "Art_Theory": 16.36,
41
+ "Electronics": 15.0,
42
+ "Psychology": 10.0,
43
+ "Biology": 15.25,
44
+ "Manage": 20.0,
45
+ "Economics": 8.47,
46
+ "Mechanical_Engineering": 16.95,
47
+ "Diagnostics_and_Laboratory_Medicine": 18.33,
48
+ "Basic_Medical_Science": 17.31,
49
+ "Computer_Science": 15.0,
50
+ "Math": 18.33,
51
+ "Music": 13.33,
52
+ "Materials": 11.67,
53
+ "Marketing": 5.08,
54
+ "Chemistry": 18.33,
55
+ "Geography": 21.15
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 16.07
59
  },
60
  "MmvetV2": {
61
  "accuracy": 52.6886,
Mono-InternVL-2B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 16.53
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 10.69,
 
 
25
  "acc_stderr": 0,
26
- "acc": 10.69
27
  },
28
  "MmvetV2": {
29
  "accuracy": 32.4371,
@@ -332,9 +364,9 @@
332
  "acc": 22.88
333
  },
334
  "MathVision": {
335
- "accuracy": 12.53,
336
  "acc_stderr": 0,
337
- "acc": 12.53
338
  },
339
  "CII-Bench": {
340
  "accuracy": 23.4,
 
21
  "acc": 16.53
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 10.0,
25
+ "subject_score": {
26
+ "History": 12.5,
27
+ "Art": 9.43,
28
+ "Design": 6.67,
29
+ "Literature": 15.38,
30
+ "Agriculture": 13.33,
31
+ "Finance": 8.33,
32
+ "Sociology": 11.11,
33
+ "Accounting": 8.62,
34
+ "Energy_and_Power": 3.45,
35
+ "Pharmacy": 24.56,
36
+ "Architecture_and_Engineering": 3.33,
37
+ "Clinical_Medicine": 10.17,
38
+ "Public_Health": 3.45,
39
+ "Physics": 11.67,
40
+ "Art_Theory": 12.73,
41
+ "Electronics": 1.67,
42
+ "Psychology": 15.0,
43
+ "Biology": 11.86,
44
+ "Manage": 6.0,
45
+ "Economics": 11.86,
46
+ "Mechanical_Engineering": 5.08,
47
+ "Diagnostics_and_Laboratory_Medicine": 13.33,
48
+ "Basic_Medical_Science": 7.69,
49
+ "Computer_Science": 10.0,
50
+ "Math": 18.33,
51
+ "Music": 10.0,
52
+ "Materials": 6.67,
53
+ "Marketing": 8.47,
54
+ "Chemistry": 6.67,
55
+ "Geography": 13.46
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 10.0
59
  },
60
  "MmvetV2": {
61
  "accuracy": 32.4371,
 
364
  "acc": 22.88
365
  },
366
  "MathVision": {
367
+ "accuracy": 12.34,
368
  "acc_stderr": 0,
369
+ "acc": 12.34
370
  },
371
  "CII-Bench": {
372
  "accuracy": 23.4,
NVLM-D-72B/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -26,9 +26,41 @@
26
  "acc": 36.84
27
  },
28
  "MMMU_Pro_vision": {
29
- "accuracy": 38.73,
 
 
30
  "acc_stderr": 0,
31
- "acc": 38.73
32
  },
33
  "MmvetV2": {
34
  "reject_info": {
@@ -349,9 +381,9 @@
349
  "acc": 49.3
350
  },
351
  "MathVision": {
352
- "accuracy": 20.26,
353
  "acc_stderr": 0,
354
- "acc": 20.26
355
  },
356
  "CII-Bench": {
357
  "accuracy": 55.42,
 
26
  "acc": 36.84
27
  },
28
  "MMMU_Pro_vision": {
29
+ "accuracy": 21.68,
30
+ "subject_score": {
31
+ "Agriculture": 16.67,
32
+ "History": 26.79,
33
+ "Finance": 26.67,
34
+ "Sociology": 35.19,
35
+ "Design": 31.67,
36
+ "Art": 32.08,
37
+ "Accounting": 24.14,
38
+ "Architecture_and_Engineering": 13.33,
39
+ "Literature": 50.0,
40
+ "Pharmacy": 31.58,
41
+ "Public_Health": 8.62,
42
+ "Physics": 15.0,
43
+ "Energy_and_Power": 8.62,
44
+ "Art_Theory": 30.91,
45
+ "Clinical_Medicine": 22.03,
46
+ "Psychology": 18.33,
47
+ "Manage": 16.0,
48
+ "Biology": 23.73,
49
+ "Economics": 23.73,
50
+ "Electronics": 13.33,
51
+ "Mechanical_Engineering": 23.73,
52
+ "Diagnostics_and_Laboratory_Medicine": 10.0,
53
+ "Computer_Science": 21.67,
54
+ "Basic_Medical_Science": 38.46,
55
+ "Math": 18.33,
56
+ "Music": 15.0,
57
+ "Marketing": 23.73,
58
+ "Materials": 15.0,
59
+ "Chemistry": 16.67,
60
+ "Geography": 5.77
61
+ },
62
  "acc_stderr": 0,
63
+ "acc": 21.68
64
  },
65
  "MmvetV2": {
66
  "reject_info": {
 
381
  "acc": 49.3
382
  },
383
  "MathVision": {
384
+ "accuracy": 19.74,
385
  "acc_stderr": 0,
386
+ "acc": 19.74
387
  },
388
  "CII-Bench": {
389
  "accuracy": 55.42,
Phi-3.5-Vision-Instruct/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 24.28
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 11.5,
25
  "acc_stderr": 0,
26
- "acc": 11.5
27
  },
28
  "MmvetV2": {
29
  "accuracy": 42.5725,
@@ -334,9 +366,9 @@
334
  "acc": 38.15
335
  },
336
  "MathVision": {
337
- "accuracy": 14.47,
338
  "acc_stderr": 0,
339
- "acc": 14.47
340
  },
341
  "CII-Bench": {
342
  "accuracy": 36.08,
 
21
  "acc": 24.28
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 12.43,
25
+ "subject_score": {
26
+ "History": 7.14,
27
+ "Art": 16.98,
28
+ "Design": 13.33,
29
+ "Literature": 26.92,
30
+ "Agriculture": 5.0,
31
+ "Finance": 8.33,
32
+ "Sociology": 12.96,
33
+ "Accounting": 10.34,
34
+ "Energy_and_Power": 6.9,
35
+ "Pharmacy": 19.3,
36
+ "Architecture_and_Engineering": 8.33,
37
+ "Clinical_Medicine": 3.39,
38
+ "Public_Health": 10.34,
39
+ "Physics": 11.67,
40
+ "Art_Theory": 1.82,
41
+ "Electronics": 11.67,
42
+ "Psychology": 13.33,
43
+ "Biology": 15.25,
44
+ "Manage": 22.0,
45
+ "Economics": 10.17,
46
+ "Mechanical_Engineering": 15.25,
47
+ "Diagnostics_and_Laboratory_Medicine": 10.0,
48
+ "Basic_Medical_Science": 26.92,
49
+ "Computer_Science": 13.33,
50
+ "Math": 10.0,
51
+ "Music": 20.0,
52
+ "Materials": 8.33,
53
+ "Marketing": 10.17,
54
+ "Chemistry": 18.33,
55
+ "Geography": 9.62
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 12.43
59
  },
60
  "MmvetV2": {
61
  "accuracy": 42.5725,
 
366
  "acc": 38.15
367
  },
368
  "MathVision": {
369
+ "accuracy": 14.64,
370
  "acc_stderr": 0,
371
+ "acc": 14.64
372
  },
373
  "CII-Bench": {
374
  "accuracy": 36.08,
Phi-4-multimodal-instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,685 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Phi-4-multimodal-instruct",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 42,
12
+ "accuracy": 47.73
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 308,
17
+ "accuracy": 34.22
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 32,
22
+ "accuracy": 25.4
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 63,
27
+ "accuracy": 30.88
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 58,
32
+ "accuracy": 37.91
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 30,
37
+ "accuracy": 35.29
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 83,
42
+ "accuracy": 34.02
43
+ },
44
+ "accuracy": 34.22,
45
+ "acc_stderr": 0,
46
+ "acc": 34.22
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 54.0,
50
+ "subject_score": {
51
+ "Accounting": 50.0,
52
+ "Agriculture": 53.33,
53
+ "Architecture": 40.0,
54
+ "Art": 78.33,
55
+ "Basic": 56.67,
56
+ "Biology": 50.0,
57
+ "Chemistry": 26.67,
58
+ "Clinical": 70.0,
59
+ "Computer": 50.0,
60
+ "Design": 73.33,
61
+ "Diagnostics": 40.0,
62
+ "Economics": 56.67,
63
+ "Electronics": 36.67,
64
+ "Energy": 46.67,
65
+ "Finance": 46.67,
66
+ "Geography": 43.33,
67
+ "History": 60.0,
68
+ "Literature": 90.0,
69
+ "Manage": 50.0,
70
+ "Marketing": 56.67,
71
+ "Materials": 33.33,
72
+ "Math": 43.33,
73
+ "Mechanical": 26.67,
74
+ "Music": 33.33,
75
+ "Pharmacy": 60.0,
76
+ "Physics": 60.0,
77
+ "Psychology": 66.67,
78
+ "Public": 66.67,
79
+ "Sociology": 76.67
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 53.3,
83
+ "Easy": 65.42,
84
+ "Hard": 37.02
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 54.0
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 39.36,
91
+ "subject_score": {
92
+ "History": 62.5,
93
+ "Art": 52.83,
94
+ "Design": 65.0,
95
+ "Literature": 67.31,
96
+ "Agriculture": 31.67,
97
+ "Finance": 21.67,
98
+ "Sociology": 53.7,
99
+ "Accounting": 27.59,
100
+ "Energy_and_Power": 27.59,
101
+ "Pharmacy": 43.86,
102
+ "Architecture_and_Engineering": 38.33,
103
+ "Clinical_Medicine": 42.37,
104
+ "Public_Health": 39.66,
105
+ "Physics": 38.33,
106
+ "Art_Theory": 70.91,
107
+ "Electronics": 50.0,
108
+ "Psychology": 35.0,
109
+ "Biology": 37.29,
110
+ "Manage": 32.0,
111
+ "Economics": 42.37,
112
+ "Mechanical_Engineering": 22.03,
113
+ "Diagnostics_and_Laboratory_Medicine": 30.0,
114
+ "Basic_Medical_Science": 40.38,
115
+ "Computer_Science": 35.0,
116
+ "Math": 23.33,
117
+ "Music": 30.0,
118
+ "Materials": 21.67,
119
+ "Marketing": 40.68,
120
+ "Chemistry": 33.33,
121
+ "Geography": 32.69
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 36.58,
125
+ "Easy": 53.03,
126
+ "Hard": 26.93
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 39.36
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 26.59,
133
+ "subject_score": {
134
+ "History": 37.5,
135
+ "Art": 39.62,
136
+ "Design": 56.67,
137
+ "Literature": 65.38,
138
+ "Agriculture": 20.0,
139
+ "Finance": 18.33,
140
+ "Sociology": 29.63,
141
+ "Accounting": 25.86,
142
+ "Energy_and_Power": 20.69,
143
+ "Pharmacy": 31.58,
144
+ "Architecture_and_Engineering": 21.67,
145
+ "Clinical_Medicine": 22.03,
146
+ "Public_Health": 10.34,
147
+ "Physics": 21.67,
148
+ "Art_Theory": 32.73,
149
+ "Electronics": 28.33,
150
+ "Psychology": 20.0,
151
+ "Biology": 23.73,
152
+ "Manage": 26.0,
153
+ "Economics": 16.95,
154
+ "Mechanical_Engineering": 18.64,
155
+ "Diagnostics_and_Laboratory_Medicine": 18.33,
156
+ "Basic_Medical_Science": 32.69,
157
+ "Computer_Science": 21.67,
158
+ "Math": 18.33,
159
+ "Music": 30.0,
160
+ "Materials": 16.67,
161
+ "Marketing": 28.81,
162
+ "Chemistry": 26.67,
163
+ "Geography": 25.0
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 26.59
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 36.25,
170
+ "capability_scores": {
171
+ "math": 28.235294117647058,
172
+ "ocr": 32.592592592592595,
173
+ "spat": 40.95238095238095,
174
+ "rec": 25.454545454545453,
175
+ "know": 66.66666666666666
176
+ },
177
+ "capability_detail_scores": {
178
+ "math_ocr": 30.0,
179
+ "spat_ocr_math": 30.0,
180
+ "rec_spat_ocr_math": 0.0,
181
+ "rec_spat": 60.0,
182
+ "spat_ocr": 75.0,
183
+ "rec_spat_ocr": 0.0,
184
+ "spat_ocr_know": 100.0,
185
+ "rec_ocr": 0.0,
186
+ "rec_spat_know": 50.0
187
+ },
188
+ "acc_stderr": 0,
189
+ "acc": 36.25
190
+ },
191
+ "MathVerse": {
192
+ "Text Dominant": {
193
+ "accuracy": 37.56,
194
+ "correct": 296,
195
+ "total": 788
196
+ },
197
+ "Total": {
198
+ "accuracy": 30.63,
199
+ "correct": 1207,
200
+ "total": 3940
201
+ },
202
+ "Text Lite": {
203
+ "accuracy": 33.5,
204
+ "correct": 264,
205
+ "total": 788
206
+ },
207
+ "Vision Intensive": {
208
+ "accuracy": 31.35,
209
+ "correct": 247,
210
+ "total": 788
211
+ },
212
+ "Vision Dominant": {
213
+ "accuracy": 30.84,
214
+ "correct": 243,
215
+ "total": 788
216
+ },
217
+ "Vision Only": {
218
+ "accuracy": 19.92,
219
+ "correct": 157,
220
+ "total": 788
221
+ },
222
+ "accuracy": 30.63,
223
+ "acc_stderr": 0,
224
+ "acc": 30.63
225
+ },
226
+ "Ocrlite": {
227
+ "final_score": [
228
+ 680,
229
+ 1644
230
+ ],
231
+ "accuracy": 41.363,
232
+ "Key Information Extraction-Bookshelf": [
233
+ 6,
234
+ 51,
235
+ 0.118,
236
+ {
237
+ "Default": [
238
+ 6,
239
+ 51,
240
+ 0.118
241
+ ]
242
+ }
243
+ ],
244
+ "Scene Text-centric VQA-diet_constraints": [
245
+ 49,
246
+ 90,
247
+ 0.544,
248
+ {
249
+ "Default": [
250
+ 49,
251
+ 90,
252
+ 0.544
253
+ ]
254
+ }
255
+ ],
256
+ "Doc-oriented VQA-Control": [
257
+ 35,
258
+ 189,
259
+ 0.185,
260
+ {
261
+ "Default": [
262
+ 35,
263
+ 189,
264
+ 0.185
265
+ ]
266
+ }
267
+ ],
268
+ "Doc-oriented VQA": [
269
+ 78,
270
+ 204,
271
+ 0.382,
272
+ {
273
+ "Default": [
274
+ 78,
275
+ 204,
276
+ 0.382
277
+ ]
278
+ }
279
+ ],
280
+ "Scene Text-centric VQA-Fake_logo": [
281
+ 35,
282
+ 119,
283
+ 0.294,
284
+ {
285
+ "Default": [
286
+ 35,
287
+ 119,
288
+ 0.294
289
+ ]
290
+ }
291
+ ],
292
+ "Handwritten Mathematical Expression Recognition": [
293
+ 1,
294
+ 100,
295
+ 0.01,
296
+ {
297
+ "Default": [
298
+ 1,
299
+ 100,
300
+ 0.01
301
+ ]
302
+ }
303
+ ],
304
+ "Key Information Extraction": [
305
+ 131,
306
+ 209,
307
+ 0.627,
308
+ {
309
+ "Default": [
310
+ 131,
311
+ 209,
312
+ 0.627
313
+ ]
314
+ }
315
+ ],
316
+ "Scene Text-centric VQA-Control": [
317
+ 112,
318
+ 200,
319
+ 0.56,
320
+ {
321
+ "Default": [
322
+ 112,
323
+ 200,
324
+ 0.56
325
+ ]
326
+ }
327
+ ],
328
+ "Scene Text-centric VQA": [
329
+ 122,
330
+ 282,
331
+ 0.433,
332
+ {
333
+ "Default": [
334
+ 122,
335
+ 282,
336
+ 0.433
337
+ ]
338
+ }
339
+ ],
340
+ "Artistic Text Recognition": [
341
+ 31,
342
+ 50,
343
+ 0.62,
344
+ {
345
+ "Default": [
346
+ 31,
347
+ 50,
348
+ 0.62
349
+ ]
350
+ }
351
+ ],
352
+ "Irregular Text Recognition": [
353
+ 31,
354
+ 50,
355
+ 0.62,
356
+ {
357
+ "Default": [
358
+ 31,
359
+ 50,
360
+ 0.62
361
+ ]
362
+ }
363
+ ],
364
+ "Non-Semantic Text Recognition": [
365
+ 14,
366
+ 50,
367
+ 0.28,
368
+ {
369
+ "Default": [
370
+ 14,
371
+ 50,
372
+ 0.28
373
+ ]
374
+ }
375
+ ],
376
+ "Regular Text Recognition": [
377
+ 35,
378
+ 50,
379
+ 0.7,
380
+ {
381
+ "Default": [
382
+ 35,
383
+ 50,
384
+ 0.7
385
+ ]
386
+ }
387
+ ],
388
+ "acc_stderr": 0,
389
+ "acc": 41.363
390
+ },
391
+ "OcrliteZh": {
392
+ "final_score": [
393
+ 101,
394
+ 234
395
+ ],
396
+ "accuracy": 43.162,
397
+ "Docvqa": [
398
+ 3,
399
+ 10,
400
+ 0.3,
401
+ {
402
+ "Default": [
403
+ 3,
404
+ 10,
405
+ 0.3
406
+ ]
407
+ }
408
+ ],
409
+ "Chartqa-human": [
410
+ 4,
411
+ 10,
412
+ 0.4,
413
+ {
414
+ "Default": [
415
+ 4,
416
+ 10,
417
+ 0.4
418
+ ]
419
+ }
420
+ ],
421
+ "Chartqa-au": [
422
+ 5,
423
+ 10,
424
+ 0.5,
425
+ {
426
+ "Default": [
427
+ 5,
428
+ 10,
429
+ 0.5
430
+ ]
431
+ }
432
+ ],
433
+ "infographic": [
434
+ 5,
435
+ 10,
436
+ 0.5,
437
+ {
438
+ "Default": [
439
+ 5,
440
+ 10,
441
+ 0.5
442
+ ]
443
+ }
444
+ ],
445
+ "Key Information Extraction": [
446
+ 33,
447
+ 45,
448
+ 0.733,
449
+ {
450
+ "Default": [
451
+ 33,
452
+ 45,
453
+ 0.733
454
+ ]
455
+ }
456
+ ],
457
+ "Scene Text-centric VQA": [
458
+ 14,
459
+ 40,
460
+ 0.35,
461
+ {
462
+ "Default": [
463
+ 14,
464
+ 40,
465
+ 0.35
466
+ ]
467
+ }
468
+ ],
469
+ "Artistic Text Recognition": [
470
+ 1,
471
+ 11,
472
+ 0.091,
473
+ {
474
+ "Default": [
475
+ 1,
476
+ 11,
477
+ 0.091
478
+ ]
479
+ }
480
+ ],
481
+ "IrRegular Text Recognition": [
482
+ 2,
483
+ 11,
484
+ 0.182,
485
+ {
486
+ "Default": [
487
+ 2,
488
+ 11,
489
+ 0.182
490
+ ]
491
+ }
492
+ ],
493
+ "Non-semantic Text Recognition": [
494
+ 6,
495
+ 12,
496
+ 0.5,
497
+ {
498
+ "Default": [
499
+ 6,
500
+ 12,
501
+ 0.5
502
+ ]
503
+ }
504
+ ],
505
+ "Regular Text Recognition": [
506
+ 6,
507
+ 11,
508
+ 0.545,
509
+ {
510
+ "Default": [
511
+ 6,
512
+ 11,
513
+ 0.545
514
+ ]
515
+ }
516
+ ],
517
+ "Handwriting_CN": [
518
+ 7,
519
+ 20,
520
+ 0.35,
521
+ {
522
+ "Default": [
523
+ 7,
524
+ 20,
525
+ 0.35
526
+ ]
527
+ }
528
+ ],
529
+ "Chinese Unlimited": [
530
+ 15,
531
+ 44,
532
+ 0.341,
533
+ {
534
+ "Default": [
535
+ 15,
536
+ 44,
537
+ 0.341
538
+ ]
539
+ }
540
+ ],
541
+ "acc_stderr": 0,
542
+ "acc": 43.162
543
+ },
544
+ "CharXiv": {
545
+ "descriptive": {
546
+ "Overall Score": 56.25,
547
+ "By Question": {
548
+ "Q1": 51.64,
549
+ "Q2": 73.48,
550
+ "Q3": 62.23,
551
+ "Q4": 77.82,
552
+ "Q5": 79.08,
553
+ "Q6": 64.26,
554
+ "Q7": 61.54,
555
+ "Q8": 58.04,
556
+ "Q9": 53.73,
557
+ "Q10": 56.85,
558
+ "Q11": 38.86,
559
+ "Q12": 55.49,
560
+ "Q13": 51.6,
561
+ "Q14": 48.23,
562
+ "Q15": 36.74,
563
+ "Q16": 66.67,
564
+ "Q17": 3.12,
565
+ "Q18": 70.45,
566
+ "Q19": 89.23
567
+ },
568
+ "By Category": {
569
+ "Information Extraction": 67.2,
570
+ "Enumeration": 48.59,
571
+ "Pattern Recognition": 58.08,
572
+ "Counting": 61.58,
573
+ "Compositionality": 3.12
574
+ },
575
+ "By Subplot": {
576
+ "1 Subplot": 64.18,
577
+ "2-4 Subplots": 54.03,
578
+ "5+ Subplots": 46.82
579
+ },
580
+ "By Subject": {
581
+ "Computer Science": 51.79,
582
+ "Economics": 57.07,
583
+ "Electrical Engineering and Systems Science": 65.13,
584
+ "Mathematics": 58.33,
585
+ "Physics": 49.41,
586
+ "Quantitative Biology": 51.19,
587
+ "Quantitative Finance": 59.91,
588
+ "Statistics": 57.96
589
+ },
590
+ "By Year": {
591
+ "2020": 55.97,
592
+ "2021": 54.89,
593
+ "2022": 55.02,
594
+ "2023": 59.17
595
+ },
596
+ "N_valid": 4000,
597
+ "N_invalid": 0,
598
+ "Question Type": "Descriptive"
599
+ },
600
+ "reasoning": {
601
+ "Overall Score": 34.3,
602
+ "By Answer Type": {
603
+ "Text-in-Chart": 32.5,
604
+ "Text-in-General": 47.47,
605
+ "Number-in-Chart": 39.66,
606
+ "Number-in-General": 26.64
607
+ },
608
+ "By Source": {
609
+ "GPT-Sourced": 38.04,
610
+ "GPT-Inspired": 28.7,
611
+ "Completely Human": 35.17
612
+ },
613
+ "By Subject": {
614
+ "Computer Science": 32.54,
615
+ "Economics": 34.06,
616
+ "Electrical Engineering and Systems Science": 31.93,
617
+ "Mathematics": 39.26,
618
+ "Physics": 39.37,
619
+ "Quantitative Biology": 30.95,
620
+ "Quantitative Finance": 28.45,
621
+ "Statistics": 37.17
622
+ },
623
+ "By Year": {
624
+ "2020": 30.36,
625
+ "2021": 34.48,
626
+ "2022": 38.11,
627
+ "2023": 34.27
628
+ },
629
+ "By Subplot": {
630
+ "1 Subplot": 38.34,
631
+ "2-4 Subplots": 32.8,
632
+ "5+ Subplots": 30.08
633
+ },
634
+ "N_valid": 1000,
635
+ "N_invalid": 0,
636
+ "Question Type": "Reasoning"
637
+ },
638
+ "accuracy": 45.27,
639
+ "acc_stderr": 0,
640
+ "acc": 45.27
641
+ },
642
+ "MathVision": {
643
+ "accuracy": 15.95,
644
+ "acc_stderr": 0,
645
+ "acc": 15.95
646
+ },
647
+ "CII-Bench": {
648
+ "accuracy": 37.65,
649
+ "domain_score": {
650
+ "Life": 33.77,
651
+ "Art": 41.91,
652
+ "CTC": 31.85,
653
+ "Society": 37.84,
654
+ "Env.": 53.7,
655
+ "Politics": 45.83
656
+ },
657
+ "emotion_score": {
658
+ "Neutral": 38.35,
659
+ "Negative": 40.0,
660
+ "Positive": 34.19
661
+ },
662
+ "acc_stderr": 0,
663
+ "acc": 37.65
664
+ },
665
+ "Blink": {
666
+ "accuracy": 61.39,
667
+ "Art Style": 87.18,
668
+ "Counting": 59.17,
669
+ "Forensic Detection": 90.15,
670
+ "Functional Correspondence": 27.69,
671
+ "IQ Test": 25.33,
672
+ "Jigsaw": 67.33,
673
+ "Multi-view Reasoning": 77.44,
674
+ "Object Localization": 52.46,
675
+ "Relative Depth": 70.97,
676
+ "Relative Reflectance": 31.34,
677
+ "Semantic Correspondence": 51.8,
678
+ "Spatial Relation": 72.73,
679
+ "Visual Correspondence": 67.44,
680
+ "Visual Similarity": 82.22,
681
+ "acc_stderr": 0,
682
+ "acc": 61.39
683
+ }
684
+ }
685
+ }
Pixtral-12B-2409/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 31.5
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 57.51,
 
25
  "acc_stderr": 0,
26
- "acc": 57.51
27
  },
28
  "MmvetV2": {
29
  "accuracy": 55.087,
@@ -333,9 +365,9 @@
333
  "acc": 51.1
334
  },
335
  "MathVision": {
336
- "accuracy": 21.55,
337
  "acc_stderr": 0,
338
- "acc": 21.55
339
  },
340
  "CII-Bench": {
341
  "accuracy": 31.63,
 
21
  "acc": 31.5
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 20.92,
25
+ "subject_score": {
26
+ "History": 23.21,
27
+ "Art": 32.08,
28
+ "Design": 25.0,
29
+ "Literature": 57.69,
30
+ "Agriculture": 13.33,
31
+ "Finance": 21.67,
32
+ "Sociology": 27.78,
33
+ "Accounting": 15.52,
34
+ "Energy_and_Power": 15.52,
35
+ "Pharmacy": 28.07,
36
+ "Architecture_and_Engineering": 13.33,
37
+ "Clinical_Medicine": 10.17,
38
+ "Public_Health": 18.97,
39
+ "Physics": 20.0,
40
+ "Art_Theory": 29.09,
41
+ "Electronics": 18.33,
42
+ "Psychology": 13.33,
43
+ "Biology": 18.64,
44
+ "Manage": 14.0,
45
+ "Economics": 22.03,
46
+ "Mechanical_Engineering": 13.56,
47
+ "Diagnostics_and_Laboratory_Medicine": 21.67,
48
+ "Basic_Medical_Science": 21.15,
49
+ "Computer_Science": 21.67,
50
+ "Math": 21.67,
51
+ "Music": 15.0,
52
+ "Materials": 13.33,
53
+ "Marketing": 27.12,
54
+ "Chemistry": 23.33,
55
+ "Geography": 17.31
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 20.92
59
  },
60
  "MmvetV2": {
61
  "accuracy": 55.087,
 
365
  "acc": 51.1
366
  },
367
  "MathVision": {
368
+ "accuracy": 19.18,
369
  "acc_stderr": 0,
370
+ "acc": 19.18
371
  },
372
  "CII-Bench": {
373
  "accuracy": 31.63,
Pixtral-Large-Instruct-2411/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,517 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Pixtral-Large-Instruct-2411",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 57,
12
+ "accuracy": 64.77
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 386,
17
+ "accuracy": 42.89
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 34,
22
+ "accuracy": 26.98
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 75,
27
+ "accuracy": 36.76
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 73,
32
+ "accuracy": 47.71
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 40,
37
+ "accuracy": 47.06
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 107,
42
+ "accuracy": 43.85
43
+ },
44
+ "accuracy": 42.89,
45
+ "acc_stderr": 0,
46
+ "acc": 42.89
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 54.22,
50
+ "subject_score": {
51
+ "Accounting": 30.0,
52
+ "Agriculture": 60.0,
53
+ "Architecture": 40.0,
54
+ "Art": 81.67,
55
+ "Basic": 70.0,
56
+ "Biology": 43.33,
57
+ "Chemistry": 30.0,
58
+ "Clinical": 66.67,
59
+ "Computer": 50.0,
60
+ "Design": 76.67,
61
+ "Diagnostics": 46.67,
62
+ "Economics": 56.67,
63
+ "Electronics": 40.0,
64
+ "Energy": 43.33,
65
+ "Finance": 36.67,
66
+ "Geography": 60.0,
67
+ "History": 66.67,
68
+ "Literature": 83.33,
69
+ "Manage": 63.33,
70
+ "Marketing": 50.0,
71
+ "Materials": 43.33,
72
+ "Math": 36.67,
73
+ "Mechanical": 30.0,
74
+ "Music": 36.67,
75
+ "Pharmacy": 50.0,
76
+ "Physics": 50.0,
77
+ "Psychology": 66.67,
78
+ "Public": 70.0,
79
+ "Sociology": 66.67
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 53.77,
83
+ "Easy": 64.41,
84
+ "Hard": 38.67
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 54.22
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 37.28,
91
+ "subject_score": {
92
+ "Literature": 57.69,
93
+ "Design": 61.67,
94
+ "History": 50.0,
95
+ "Sociology": 50.0,
96
+ "Agriculture": 35.0,
97
+ "Art": 58.49,
98
+ "Energy_and_Power": 22.41,
99
+ "Pharmacy": 38.6,
100
+ "Architecture_and_Engineering": 28.33,
101
+ "Clinical_Medicine": 44.07,
102
+ "Accounting": 31.03,
103
+ "Physics": 30.0,
104
+ "Electronics": 35.0,
105
+ "Public_Health": 43.1,
106
+ "Art_Theory": 65.45,
107
+ "Manage": 38.0,
108
+ "Economics": 37.29,
109
+ "Diagnostics_and_Laboratory_Medicine": 33.33,
110
+ "Mechanical_Engineering": 25.42,
111
+ "Basic_Medical_Science": 40.38,
112
+ "Finance": 25.0,
113
+ "Computer_Science": 38.33,
114
+ "Math": 28.33,
115
+ "Psychology": 38.33,
116
+ "Biology": 32.2,
117
+ "Materials": 20.0,
118
+ "Music": 25.0,
119
+ "Marketing": 32.2,
120
+ "Geography": 38.46,
121
+ "Chemistry": 25.0
122
+ },
123
+ "difficulty_score": {
124
+ "Easy": 47.73,
125
+ "Hard": 26.93,
126
+ "Medium": 35.58
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 37.28
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 34.28,
133
+ "subject_score": {
134
+ "Art": 52.83,
135
+ "Agriculture": 26.67,
136
+ "Literature": 69.23,
137
+ "Sociology": 38.89,
138
+ "History": 39.29,
139
+ "Clinical_Medicine": 28.81,
140
+ "Pharmacy": 36.84,
141
+ "Design": 33.33,
142
+ "Public_Health": 41.38,
143
+ "Art_Theory": 58.18,
144
+ "Accounting": 43.1,
145
+ "Energy_and_Power": 25.86,
146
+ "Architecture_and_Engineering": 21.67,
147
+ "Physics": 25.0,
148
+ "Psychology": 33.33,
149
+ "Manage": 34.0,
150
+ "Biology": 37.29,
151
+ "Diagnostics_and_Laboratory_Medicine": 20.0,
152
+ "Mechanical_Engineering": 23.73,
153
+ "Finance": 43.33,
154
+ "Economics": 42.37,
155
+ "Basic_Medical_Science": 32.69,
156
+ "Electronics": 28.33,
157
+ "Computer_Science": 25.0,
158
+ "Math": 23.33,
159
+ "Music": 23.33,
160
+ "Marketing": 50.85,
161
+ "Materials": 26.67,
162
+ "Chemistry": 30.0,
163
+ "Geography": 21.15
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 34.28
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 66.1315,
170
+ "capability_scores": {
171
+ "math": 72.64705882352939,
172
+ "ocr": 72.74038461538464,
173
+ "spat": 65.38071065989845,
174
+ "rec": 62.45145631067969,
175
+ "know": 62.564102564102555,
176
+ "gen": 64.10909090909097,
177
+ "seq": 60.17857142857144
178
+ },
179
+ "capability_detail_scores": {
180
+ "math_ocr": 71.81818181818183,
181
+ "math_spat_ocr": 86.0,
182
+ "math_spat_ocr_rec": 45.0,
183
+ "spat_rec": 64.64285714285715,
184
+ "spat_ocr": 86.15384615384616,
185
+ "spat_ocr_rec": 33.33333333333333,
186
+ "spat_know_ocr": 100.0,
187
+ "ocr_rec": 87.5,
188
+ "spat_know_rec": 45.0,
189
+ "ocr": 80.3125,
190
+ "rec": 64.23728813559322,
191
+ "know_rec": 66.15384615384615,
192
+ "know_gen_rec": 59.99999999999998,
193
+ "know_gen_ocr_rec": 62.30769230769231,
194
+ "spat_gen_ocr_rec": 71.6279069767442,
195
+ "spat_gen_ocr": 85.00000000000001,
196
+ "math_gen_ocr_seq_spat": 60.0,
197
+ "math_ocr_rec_seq_spat": 0.0,
198
+ "spat_gen_rec": 51.36363636363637,
199
+ "math_spat_gen_ocr": 40.0,
200
+ "seq_spat_rec": 50.0,
201
+ "seq_spat_ocr_rec": 10.0,
202
+ "spat_know_gen_rec": 63.33333333333333,
203
+ "gen_rec": 67.94117647058823,
204
+ "spat_know_ocr_rec": 85.0,
205
+ "know_gen_ocr_rec_spat": 90.0,
206
+ "math_ocr_rec": 100.0,
207
+ "gen_ocr_rec": 86.00000000000001,
208
+ "seq_gen_ocr_rec": 71.42857142857143,
209
+ "gen_ocr": 69.23076923076923,
210
+ "seq_gen_rec": 62.14285714285713,
211
+ "seq_rec": 83.33333333333334,
212
+ "seq_spat_gen_rec": 58.75,
213
+ "seq_know_rec": 100.0,
214
+ "seq_know_gen_rec": 65.0,
215
+ "gen_ocr_rec_seq_spat": 53.333333333333336,
216
+ "know_gen_ocr_rec_seq": 100.0,
217
+ "math_know_rec": 50.0,
218
+ "seq_ocr_rec": 0.0
219
+ },
220
+ "acc_stderr": 0,
221
+ "acc": 66.1315
222
+ },
223
+ "MathVerse": {
224
+ "Vision Intensive": {
225
+ "accuracy": 27.41,
226
+ "correct": 216,
227
+ "total": 788
228
+ },
229
+ "Total": {
230
+ "accuracy": 29.06,
231
+ "correct": 1145,
232
+ "total": 3940
233
+ },
234
+ "Vision Only": {
235
+ "accuracy": 26.9,
236
+ "correct": 212,
237
+ "total": 788
238
+ },
239
+ "Vision Dominant": {
240
+ "accuracy": 26.02,
241
+ "correct": 205,
242
+ "total": 788
243
+ },
244
+ "Text Lite": {
245
+ "accuracy": 28.55,
246
+ "correct": 225,
247
+ "total": 788
248
+ },
249
+ "Text Dominant": {
250
+ "accuracy": 36.42,
251
+ "correct": 287,
252
+ "total": 788
253
+ },
254
+ "accuracy": 29.06,
255
+ "acc_stderr": 0,
256
+ "acc": 29.06
257
+ },
258
+ "Ocrlite": {
259
+ "final_score": [
260
+ 1212,
261
+ 1645
262
+ ],
263
+ "accuracy": 73.678,
264
+ "Key Information Extraction-Bookshelf": [
265
+ 30,
266
+ 52
267
+ ],
268
+ "Scene Text-centric VQA-diet_constraints": [
269
+ 69,
270
+ 90
271
+ ],
272
+ "Doc-oriented VQA-Control": [
273
+ 137,
274
+ 189
275
+ ],
276
+ "Doc-oriented VQA": [
277
+ 170,
278
+ 204
279
+ ],
280
+ "Scene Text-centric VQA-Fake_logo": [
281
+ 55,
282
+ 119
283
+ ],
284
+ "Handwritten Mathematical Expression Recognition": [
285
+ 26,
286
+ 100
287
+ ],
288
+ "Key Information Extraction": [
289
+ 179,
290
+ 209
291
+ ],
292
+ "Scene Text-centric VQA-Control": [
293
+ 160,
294
+ 200
295
+ ],
296
+ "Scene Text-centric VQA": [
297
+ 224,
298
+ 282
299
+ ],
300
+ "Artistic Text Recognition": [
301
+ 42,
302
+ 50
303
+ ],
304
+ "Irregular Text Recognition": [
305
+ 47,
306
+ 50
307
+ ],
308
+ "Non-Semantic Text Recognition": [
309
+ 24,
310
+ 50
311
+ ],
312
+ "Regular Text Recognition": [
313
+ 49,
314
+ 50
315
+ ],
316
+ "acc_stderr": 0,
317
+ "acc": 73.678
318
+ },
319
+ "OcrliteZh": {
320
+ "final_score": [
321
+ 71,
322
+ 234
323
+ ],
324
+ "accuracy": 30.342,
325
+ "Docvqa": [
326
+ 4,
327
+ 10
328
+ ],
329
+ "Chartqa-human": [
330
+ 3,
331
+ 10
332
+ ],
333
+ "Chartqa-au": [
334
+ 2,
335
+ 10
336
+ ],
337
+ "infographic": [
338
+ 2,
339
+ 10
340
+ ],
341
+ "Key Information Extraction": [
342
+ 24,
343
+ 45
344
+ ],
345
+ "Scene Text-centric VQA": [
346
+ 20,
347
+ 40
348
+ ],
349
+ "Artistic Text Recognition": [
350
+ 1,
351
+ 11
352
+ ],
353
+ "IrRegular Text Recognition": [
354
+ 0,
355
+ 11
356
+ ],
357
+ "Non-semantic Text Recognition": [
358
+ 0,
359
+ 12
360
+ ],
361
+ "Regular Text Recognition": [
362
+ 0,
363
+ 11
364
+ ],
365
+ "Handwriting_CN": [
366
+ 0,
367
+ 20
368
+ ],
369
+ "Chinese Unlimited": [
370
+ 15,
371
+ 44
372
+ ],
373
+ "acc_stderr": 0,
374
+ "acc": 30.342
375
+ },
376
+ "CharXiv": {
377
+ "descriptive": {
378
+ "Overall Score": 86.28,
379
+ "By Question": {
380
+ "Q1": 84.84,
381
+ "Q2": 85.22,
382
+ "Q3": 75.97,
383
+ "Q4": 87.55,
384
+ "Q5": 89.12,
385
+ "Q6": 83.13,
386
+ "Q7": 86.32,
387
+ "Q8": 90.62,
388
+ "Q9": 87.56,
389
+ "Q10": 89.73,
390
+ "Q11": 80.57,
391
+ "Q12": 84.07,
392
+ "Q13": 82.19,
393
+ "Q14": 92.2,
394
+ "Q15": 96.81,
395
+ "Q16": 80.56,
396
+ "Q17": 75.0,
397
+ "Q18": 90.28,
398
+ "Q19": 87.69
399
+ },
400
+ "By Category": {
401
+ "Information Extraction": 84.64,
402
+ "Enumeration": 90.56,
403
+ "Pattern Recognition": 85.81,
404
+ "Counting": 86.77,
405
+ "Compositionality": 75.0
406
+ },
407
+ "By Subplot": {
408
+ "1 Subplot": 90.09,
409
+ "2-4 Subplots": 85.85,
410
+ "5+ Subplots": 80.72
411
+ },
412
+ "By Subject": {
413
+ "Computer Science": 86.11,
414
+ "Economics": 89.86,
415
+ "Electrical Engineering and Systems Science": 89.29,
416
+ "Mathematics": 84.63,
417
+ "Physics": 83.07,
418
+ "Quantitative Biology": 82.54,
419
+ "Quantitative Finance": 85.78,
420
+ "Statistics": 89.16
421
+ },
422
+ "By Year": {
423
+ "2020": 85.63,
424
+ "2021": 86.21,
425
+ "2022": 87.6,
426
+ "2023": 85.69
427
+ },
428
+ "N_valid": 4000,
429
+ "N_invalid": 0,
430
+ "Question Type": "Descriptive"
431
+ },
432
+ "reasoning": {
433
+ "Overall Score": 49.2,
434
+ "By Answer Type": {
435
+ "Text-in-Chart": 55.23,
436
+ "Text-in-General": 51.52,
437
+ "Number-in-Chart": 47.84,
438
+ "Number-in-General": 37.99
439
+ },
440
+ "By Source": {
441
+ "GPT-Sourced": 53.26,
442
+ "GPT-Inspired": 48.15,
443
+ "Completely Human": 48.33
444
+ },
445
+ "By Subject": {
446
+ "Computer Science": 47.62,
447
+ "Economics": 53.62,
448
+ "Electrical Engineering and Systems Science": 44.54,
449
+ "Mathematics": 52.59,
450
+ "Physics": 53.54,
451
+ "Quantitative Biology": 46.83,
452
+ "Quantitative Finance": 44.83,
453
+ "Statistics": 48.67
454
+ },
455
+ "By Year": {
456
+ "2020": 44.53,
457
+ "2021": 53.26,
458
+ "2022": 45.9,
459
+ "2023": 52.82
460
+ },
461
+ "By Subplot": {
462
+ "1 Subplot": 47.93,
463
+ "2-4 Subplots": 50.53,
464
+ "5+ Subplots": 49.15
465
+ },
466
+ "N_valid": 1000,
467
+ "N_invalid": 0,
468
+ "Question Type": "Reasoning"
469
+ },
470
+ "accuracy": 67.74,
471
+ "acc_stderr": 0,
472
+ "acc": 67.74
473
+ },
474
+ "MathVision": {
475
+ "accuracy": 29.74,
476
+ "acc_stderr": 0,
477
+ "acc": 29.74
478
+ },
479
+ "CII-Bench": {
480
+ "accuracy": 60.65,
481
+ "domain_score": {
482
+ "Art": 63.97,
483
+ "CTC": 56.3,
484
+ "Society": 60.54,
485
+ "Env.": 74.07,
486
+ "Life": 57.14,
487
+ "Politics": 70.83
488
+ },
489
+ "emotion_score": {
490
+ "Negative": 64.53,
491
+ "Positive": 56.41,
492
+ "Neutral": 60.53
493
+ },
494
+ "acc_stderr": 0,
495
+ "acc": 60.65
496
+ },
497
+ "Blink": {
498
+ "accuracy": 59.23,
499
+ "Art Style": 78.63,
500
+ "Counting": 63.33,
501
+ "Forensic Detection": 47.73,
502
+ "Functional Correspondence": 40.0,
503
+ "IQ Test": 30.67,
504
+ "Jigsaw": 65.33,
505
+ "Multi-view Reasoning": 53.38,
506
+ "Object Localization": 53.28,
507
+ "Relative Depth": 75.0,
508
+ "Relative Reflectance": 34.33,
509
+ "Semantic Correspondence": 51.08,
510
+ "Spatial Relation": 83.22,
511
+ "Visual Correspondence": 74.42,
512
+ "Visual Similarity": 78.52,
513
+ "acc_stderr": 0,
514
+ "acc": 59.23
515
+ }
516
+ }
517
+ }
Qwen-VL-Max-20250402/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,762 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Qwen-VL-Max-20250402",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "reject_info": {
10
+ "reject_rate": 0.22,
11
+ "reject_number": 2,
12
+ "total_question": 900
13
+ },
14
+ "艺术与设计": {
15
+ "num": 88,
16
+ "correct": 62,
17
+ "accuracy": 70.45
18
+ },
19
+ "overall": {
20
+ "num": 898,
21
+ "correct": 442,
22
+ "accuracy": 49.22
23
+ },
24
+ "商业": {
25
+ "num": 126,
26
+ "correct": 37,
27
+ "accuracy": 29.37
28
+ },
29
+ "科学": {
30
+ "num": 204,
31
+ "correct": 95,
32
+ "accuracy": 46.57
33
+ },
34
+ "健康与医学": {
35
+ "num": 152,
36
+ "correct": 85,
37
+ "accuracy": 55.92
38
+ },
39
+ "人文社会科学": {
40
+ "num": 85,
41
+ "correct": 55,
42
+ "accuracy": 64.71
43
+ },
44
+ "技术与工程": {
45
+ "num": 243,
46
+ "correct": 108,
47
+ "accuracy": 44.44
48
+ },
49
+ "accuracy": 49.22,
50
+ "acc_stderr": 0,
51
+ "acc": 49.22
52
+ },
53
+ "MMMU": {
54
+ "reject_info": {
55
+ "reject_rate": 0.11,
56
+ "reject_number": 1,
57
+ "total_question": 900
58
+ },
59
+ "accuracy": 55.62,
60
+ "subject_score": {
61
+ "Accounting": 40.0,
62
+ "Agriculture": 60.0,
63
+ "Architecture": 43.33,
64
+ "Art": 73.33,
65
+ "Basic": 62.07,
66
+ "Biology": 50.0,
67
+ "Chemistry": 33.33,
68
+ "Clinical": 63.33,
69
+ "Computer": 50.0,
70
+ "Design": 80.0,
71
+ "Diagnostics": 50.0,
72
+ "Economics": 46.67,
73
+ "Electronics": 23.33,
74
+ "Energy": 53.33,
75
+ "Finance": 30.0,
76
+ "Geography": 73.33,
77
+ "History": 76.67,
78
+ "Literature": 83.33,
79
+ "Manage": 50.0,
80
+ "Marketing": 66.67,
81
+ "Materials": 46.67,
82
+ "Math": 46.67,
83
+ "Mechanical": 33.33,
84
+ "Music": 23.33,
85
+ "Pharmacy": 66.67,
86
+ "Physics": 60.0,
87
+ "Psychology": 73.33,
88
+ "Public": 63.33,
89
+ "Sociology": 73.33
90
+ },
91
+ "difficulty_score": {
92
+ "Medium": 56.26,
93
+ "Easy": 66.1,
94
+ "Hard": 37.02
95
+ },
96
+ "acc_stderr": 0,
97
+ "acc": 55.62
98
+ },
99
+ "MMMU_Pro_standard": {
100
+ "reject_info": {
101
+ "reject_rate": 0.12,
102
+ "reject_number": 2,
103
+ "total_question": 1730
104
+ },
105
+ "accuracy": 38.19,
106
+ "subject_score": {
107
+ "History": 48.21,
108
+ "Art": 62.26,
109
+ "Design": 48.33,
110
+ "Literature": 69.23,
111
+ "Agriculture": 26.67,
112
+ "Finance": 23.33,
113
+ "Sociology": 49.06,
114
+ "Accounting": 27.59,
115
+ "Energy_and_Power": 25.86,
116
+ "Pharmacy": 43.86,
117
+ "Architecture_and_Engineering": 33.33,
118
+ "Clinical_Medicine": 37.29,
119
+ "Public_Health": 27.59,
120
+ "Physics": 33.33,
121
+ "Art_Theory": 60.0,
122
+ "Electronics": 60.0,
123
+ "Psychology": 35.0,
124
+ "Biology": 32.2,
125
+ "Manage": 32.0,
126
+ "Economics": 38.98,
127
+ "Mechanical_Engineering": 38.98,
128
+ "Diagnostics_and_Laboratory_Medicine": 30.0,
129
+ "Computer_Science": 48.33,
130
+ "Math": 25.0,
131
+ "Basic_Medical_Science": 31.37,
132
+ "Music": 25.0,
133
+ "Materials": 31.67,
134
+ "Marketing": 35.59,
135
+ "Chemistry": 30.0,
136
+ "Geography": 44.23
137
+ },
138
+ "difficulty_score": {
139
+ "Medium": 34.88,
140
+ "Easy": 47.63,
141
+ "Hard": 32.42
142
+ },
143
+ "acc_stderr": 0,
144
+ "acc": 38.19
145
+ },
146
+ "MMMU_Pro_vision": {
147
+ "accuracy": 35.9,
148
+ "subject_score": {
149
+ "History": 46.43,
150
+ "Art": 43.4,
151
+ "Design": 56.67,
152
+ "Literature": 65.38,
153
+ "Agriculture": 25.0,
154
+ "Finance": 23.33,
155
+ "Sociology": 42.59,
156
+ "Accounting": 20.69,
157
+ "Energy_and_Power": 12.07,
158
+ "Pharmacy": 47.37,
159
+ "Architecture_and_Engineering": 30.0,
160
+ "Clinical_Medicine": 38.98,
161
+ "Public_Health": 25.86,
162
+ "Physics": 28.33,
163
+ "Art_Theory": 47.27,
164
+ "Electronics": 51.67,
165
+ "Psychology": 41.67,
166
+ "Biology": 33.9,
167
+ "Manage": 36.0,
168
+ "Economics": 30.51,
169
+ "Mechanical_Engineering": 30.51,
170
+ "Diagnostics_and_Laboratory_Medicine": 26.67,
171
+ "Basic_Medical_Science": 32.69,
172
+ "Computer_Science": 50.0,
173
+ "Math": 28.33,
174
+ "Music": 25.0,
175
+ "Materials": 25.0,
176
+ "Marketing": 35.59,
177
+ "Chemistry": 41.67,
178
+ "Geography": 40.38
179
+ },
180
+ "acc_stderr": 0,
181
+ "acc": 35.9
182
+ },
183
+ "MmvetV2": {
184
+ "reject_info": {
185
+ "reject_rate": 1.16,
186
+ "reject_number": 6,
187
+ "total_question": 517
188
+ },
189
+ "accuracy": 70.9785,
190
+ "capability_scores": {
191
+ "ocr": 80.96153846153851,
192
+ "math": 86.76470588235294,
193
+ "spat": 69.74358974358972,
194
+ "rec": 67.16748768472915,
195
+ "know": 64.63576158940397,
196
+ "gen": 70.00000000000003,
197
+ "seq": 69.46428571428571
198
+ },
199
+ "capability_detail_scores": {
200
+ "ocr_math": 90.9090909090909,
201
+ "spat_ocr_math": 92.66666666666666,
202
+ "rec_spat_ocr_math": 75.0,
203
+ "rec_spat": 60.0,
204
+ "spat_ocr": 75.0,
205
+ "rec_spat_ocr": 75.0,
206
+ "spat_ocr_know": 100.0,
207
+ "rec_ocr": 75.0,
208
+ "rec_spat_know": 53.75,
209
+ "ocr": 88.4375,
210
+ "rec": 68.44827586206897,
211
+ "rec_know": 61.53846153846154,
212
+ "rec_know_gen": 64.02061855670101,
213
+ "rec_ocr_know_gen": 80.76923076923077,
214
+ "rec_spat_ocr_gen": 79.53488372093027,
215
+ "spat_ocr_gen": 95.0,
216
+ "gen_spat_seq_math_ocr": 100.0,
217
+ "spat_seq_rec_math_ocr": 50.0,
218
+ "rec_spat_gen": 53.63636363636364,
219
+ "spat_ocr_math_gen": 60.0,
220
+ "rec_spat_seq": 47.142857142857146,
221
+ "rec_spat_seq_ocr": 66.66666666666666,
222
+ "rec_spat_know_gen": 30.0,
223
+ "rec_gen": 71.47058823529413,
224
+ "rec_spat_ocr_know": 25.0,
225
+ "gen_spat_rec_ocr_know": 75.0,
226
+ "rec_ocr_math": 100.0,
227
+ "rec_ocr_gen": 84.00000000000001,
228
+ "rec_seq_ocr_gen": 75.71428571428571,
229
+ "ocr_gen": 83.07692307692308,
230
+ "rec_seq_gen": 69.28571428571429,
231
+ "rec_seq": 58.333333333333336,
232
+ "rec_spat_seq_gen": 81.25,
233
+ "rec_seq_know": 100.0,
234
+ "rec_seq_know_gen": 70.0,
235
+ "gen_spat_seq_rec_ocr": 60.0,
236
+ "gen_seq_rec_ocr_know": 95.0,
237
+ "rec_know_math": 50.0,
238
+ "rec_seq_ocr": 100.0
239
+ },
240
+ "acc_stderr": 0,
241
+ "acc": 70.9785
242
+ },
243
+ "MathVerse": {
244
+ "Text Dominant": {
245
+ "accuracy": 62.44,
246
+ "correct": 492,
247
+ "total": 788
248
+ },
249
+ "Total": {
250
+ "accuracy": 52.03,
251
+ "correct": 2050,
252
+ "total": 3940
253
+ },
254
+ "Text Lite": {
255
+ "accuracy": 53.05,
256
+ "correct": 418,
257
+ "total": 788
258
+ },
259
+ "Vision Intensive": {
260
+ "accuracy": 49.75,
261
+ "correct": 392,
262
+ "total": 788
263
+ },
264
+ "Vision Dominant": {
265
+ "accuracy": 48.48,
266
+ "correct": 382,
267
+ "total": 788
268
+ },
269
+ "Vision Only": {
270
+ "accuracy": 46.45,
271
+ "correct": 366,
272
+ "total": 788
273
+ },
274
+ "accuracy": 52.03,
275
+ "acc_stderr": 0,
276
+ "acc": 52.03
277
+ },
278
+ "Ocrlite": {
279
+ "reject_info": {
280
+ "reject_rate": 0.12,
281
+ "reject_number": 2,
282
+ "total_question": 1644
283
+ },
284
+ "final_score": [
285
+ 1261,
286
+ 1642
287
+ ],
288
+ "accuracy": 76.797,
289
+ "Key Information Extraction-Bookshelf": [
290
+ 28,
291
+ 51,
292
+ 0.549,
293
+ {
294
+ "Default": [
295
+ 28,
296
+ 51,
297
+ 0.549
298
+ ]
299
+ }
300
+ ],
301
+ "Scene Text-centric VQA-diet_constraints": [
302
+ 68,
303
+ 90,
304
+ 0.756,
305
+ {
306
+ "Default": [
307
+ 68,
308
+ 90,
309
+ 0.756
310
+ ]
311
+ }
312
+ ],
313
+ "Doc-oriented VQA-Control": [
314
+ 157,
315
+ 188,
316
+ 0.835,
317
+ {
318
+ "Default": [
319
+ 157,
320
+ 188,
321
+ 0.835
322
+ ]
323
+ }
324
+ ],
325
+ "Doc-oriented VQA": [
326
+ 175,
327
+ 204,
328
+ 0.858,
329
+ {
330
+ "Default": [
331
+ 175,
332
+ 204,
333
+ 0.858
334
+ ]
335
+ }
336
+ ],
337
+ "Scene Text-centric VQA-Fake_logo": [
338
+ 50,
339
+ 119,
340
+ 0.42,
341
+ {
342
+ "Default": [
343
+ 50,
344
+ 119,
345
+ 0.42
346
+ ]
347
+ }
348
+ ],
349
+ "Handwritten Mathematical Expression Recognition": [
350
+ 1,
351
+ 100,
352
+ 0.01,
353
+ {
354
+ "Default": [
355
+ 1,
356
+ 100,
357
+ 0.01
358
+ ]
359
+ }
360
+ ],
361
+ "Key Information Extraction": [
362
+ 195,
363
+ 209,
364
+ 0.933,
365
+ {
366
+ "Default": [
367
+ 195,
368
+ 209,
369
+ 0.933
370
+ ]
371
+ }
372
+ ],
373
+ "Scene Text-centric VQA-Control": [
374
+ 171,
375
+ 199,
376
+ 0.859,
377
+ {
378
+ "Default": [
379
+ 171,
380
+ 199,
381
+ 0.859
382
+ ]
383
+ }
384
+ ],
385
+ "Scene Text-centric VQA": [
386
+ 246,
387
+ 282,
388
+ 0.872,
389
+ {
390
+ "Default": [
391
+ 246,
392
+ 282,
393
+ 0.872
394
+ ]
395
+ }
396
+ ],
397
+ "Artistic Text Recognition": [
398
+ 42,
399
+ 50,
400
+ 0.84,
401
+ {
402
+ "Default": [
403
+ 42,
404
+ 50,
405
+ 0.84
406
+ ]
407
+ }
408
+ ],
409
+ "Irregular Text Recognition": [
410
+ 43,
411
+ 50,
412
+ 0.86,
413
+ {
414
+ "Default": [
415
+ 43,
416
+ 50,
417
+ 0.86
418
+ ]
419
+ }
420
+ ],
421
+ "Non-Semantic Text Recognition": [
422
+ 36,
423
+ 50,
424
+ 0.72,
425
+ {
426
+ "Default": [
427
+ 36,
428
+ 50,
429
+ 0.72
430
+ ]
431
+ }
432
+ ],
433
+ "Regular Text Recognition": [
434
+ 49,
435
+ 50,
436
+ 0.98,
437
+ {
438
+ "Default": [
439
+ 49,
440
+ 50,
441
+ 0.98
442
+ ]
443
+ }
444
+ ],
445
+ "acc_stderr": 0,
446
+ "acc": 76.797
447
+ },
448
+ "OcrliteZh": {
449
+ "reject_info": {
450
+ "reject_rate": 0.43,
451
+ "reject_number": 1,
452
+ "total_question": 234
453
+ },
454
+ "final_score": [
455
+ 172,
456
+ 233
457
+ ],
458
+ "accuracy": 73.82,
459
+ "Docvqa": [
460
+ 9,
461
+ 10,
462
+ 0.9,
463
+ {
464
+ "Default": [
465
+ 9,
466
+ 10,
467
+ 0.9
468
+ ]
469
+ }
470
+ ],
471
+ "Chartqa-human": [
472
+ 7,
473
+ 10,
474
+ 0.7,
475
+ {
476
+ "Default": [
477
+ 7,
478
+ 10,
479
+ 0.7
480
+ ]
481
+ }
482
+ ],
483
+ "Chartqa-au": [
484
+ 8,
485
+ 10,
486
+ 0.8,
487
+ {
488
+ "Default": [
489
+ 8,
490
+ 10,
491
+ 0.8
492
+ ]
493
+ }
494
+ ],
495
+ "infographic": [
496
+ 8,
497
+ 9,
498
+ 0.889,
499
+ {
500
+ "Default": [
501
+ 8,
502
+ 9,
503
+ 0.889
504
+ ]
505
+ }
506
+ ],
507
+ "Key Information Extraction": [
508
+ 42,
509
+ 45,
510
+ 0.933,
511
+ {
512
+ "Default": [
513
+ 42,
514
+ 45,
515
+ 0.933
516
+ ]
517
+ }
518
+ ],
519
+ "Scene Text-centric VQA": [
520
+ 29,
521
+ 40,
522
+ 0.725,
523
+ {
524
+ "Default": [
525
+ 29,
526
+ 40,
527
+ 0.725
528
+ ]
529
+ }
530
+ ],
531
+ "Artistic Text Recognition": [
532
+ 5,
533
+ 11,
534
+ 0.455,
535
+ {
536
+ "Default": [
537
+ 5,
538
+ 11,
539
+ 0.455
540
+ ]
541
+ }
542
+ ],
543
+ "IrRegular Text Recognition": [
544
+ 8,
545
+ 11,
546
+ 0.727,
547
+ {
548
+ "Default": [
549
+ 8,
550
+ 11,
551
+ 0.727
552
+ ]
553
+ }
554
+ ],
555
+ "Non-semantic Text Recognition": [
556
+ 10,
557
+ 12,
558
+ 0.833,
559
+ {
560
+ "Default": [
561
+ 10,
562
+ 12,
563
+ 0.833
564
+ ]
565
+ }
566
+ ],
567
+ "Regular Text Recognition": [
568
+ 10,
569
+ 11,
570
+ 0.909,
571
+ {
572
+ "Default": [
573
+ 10,
574
+ 11,
575
+ 0.909
576
+ ]
577
+ }
578
+ ],
579
+ "Handwriting_CN": [
580
+ 13,
581
+ 20,
582
+ 0.65,
583
+ {
584
+ "Default": [
585
+ 13,
586
+ 20,
587
+ 0.65
588
+ ]
589
+ }
590
+ ],
591
+ "Chinese Unlimited": [
592
+ 23,
593
+ 44,
594
+ 0.523,
595
+ {
596
+ "Default": [
597
+ 23,
598
+ 44,
599
+ 0.523
600
+ ]
601
+ }
602
+ ],
603
+ "acc_stderr": 0,
604
+ "acc": 73.82
605
+ },
606
+ "CharXiv": {
607
+ "descriptive": {
608
+ "Overall Score": 83.53,
609
+ "By Question": {
610
+ "Q1": 89.75,
611
+ "Q2": 86.52,
612
+ "Q3": 73.39,
613
+ "Q4": 87.55,
614
+ "Q5": 84.94,
615
+ "Q6": 85.54,
616
+ "Q7": 84.62,
617
+ "Q8": 89.29,
618
+ "Q9": 75.62,
619
+ "Q10": 81.51,
620
+ "Q11": 65.14,
621
+ "Q12": 82.42,
622
+ "Q13": 73.52,
623
+ "Q14": 95.04,
624
+ "Q15": 97.44,
625
+ "Q16": 72.22,
626
+ "Q17": 59.38,
627
+ "Q18": 91.09,
628
+ "Q19": 92.31
629
+ },
630
+ "By Category": {
631
+ "Information Extraction": 84.7,
632
+ "Enumeration": 87.65,
633
+ "Pattern Recognition": 79.69,
634
+ "Counting": 83.72,
635
+ "Compositionality": 59.38
636
+ },
637
+ "By Subplot": {
638
+ "1 Subplot": 87.89,
639
+ "2-4 Subplots": 83.73,
640
+ "5+ Subplots": 76.06
641
+ },
642
+ "By Subject": {
643
+ "Computer Science": 84.52,
644
+ "Economics": 85.33,
645
+ "Electrical Engineering and Systems Science": 88.87,
646
+ "Mathematics": 83.52,
647
+ "Physics": 77.17,
648
+ "Quantitative Biology": 80.16,
649
+ "Quantitative Finance": 84.48,
650
+ "Statistics": 84.51
651
+ },
652
+ "By Year": {
653
+ "2020": 82.69,
654
+ "2021": 82.85,
655
+ "2022": 85.04,
656
+ "2023": 83.57
657
+ },
658
+ "N_valid": 4000,
659
+ "N_invalid": 0,
660
+ "Question Type": "Descriptive"
661
+ },
662
+ "reasoning": {
663
+ "Overall Score": 45.0,
664
+ "By Answer Type": {
665
+ "Text-in-Chart": 49.09,
666
+ "Text-in-General": 42.42,
667
+ "Number-in-Chart": 44.83,
668
+ "Number-in-General": 38.43
669
+ },
670
+ "By Source": {
671
+ "GPT-Sourced": 50.0,
672
+ "GPT-Inspired": 45.83,
673
+ "Completely Human": 43.17
674
+ },
675
+ "By Subject": {
676
+ "Computer Science": 49.21,
677
+ "Economics": 43.48,
678
+ "Electrical Engineering and Systems Science": 47.9,
679
+ "Mathematics": 45.93,
680
+ "Physics": 50.39,
681
+ "Quantitative Biology": 41.27,
682
+ "Quantitative Finance": 38.79,
683
+ "Statistics": 42.48
684
+ },
685
+ "By Year": {
686
+ "2020": 44.13,
687
+ "2021": 44.06,
688
+ "2022": 43.85,
689
+ "2023": 47.98
690
+ },
691
+ "By Subplot": {
692
+ "1 Subplot": 49.48,
693
+ "2-4 Subplots": 42.06,
694
+ "5+ Subplots": 42.37
695
+ },
696
+ "N_valid": 1000,
697
+ "N_invalid": 1,
698
+ "Question Type": "Reasoning"
699
+ },
700
+ "accuracy": 64.27,
701
+ "acc_stderr": 0,
702
+ "acc": 64.27
703
+ },
704
+ "MathVision": {
705
+ "reject_info": {
706
+ "reject_rate": 0.07,
707
+ "reject_number": 2,
708
+ "total_question": 3040
709
+ },
710
+ "accuracy": 33.15,
711
+ "acc_stderr": 0,
712
+ "acc": 33.15
713
+ },
714
+ "CII-Bench": {
715
+ "reject_info": {
716
+ "reject_rate": 0.13,
717
+ "reject_number": 1,
718
+ "total_question": 765
719
+ },
720
+ "accuracy": 55.1,
721
+ "domain_score": {
722
+ "Life": 50.43,
723
+ "Art": 56.62,
724
+ "CTC": 50.37,
725
+ "Society": 59.46,
726
+ "Env.": 66.67,
727
+ "Politics": 58.33
728
+ },
729
+ "emotion_score": {
730
+ "Neutral": 55.09,
731
+ "Negative": 56.98,
732
+ "Positive": 52.99
733
+ },
734
+ "acc_stderr": 0,
735
+ "acc": 55.1
736
+ },
737
+ "Blink": {
738
+ "reject_info": {
739
+ "reject_rate": 0.05,
740
+ "reject_number": 1,
741
+ "total_question": 1901
742
+ },
743
+ "accuracy": 57.42,
744
+ "Art Style": 76.92,
745
+ "Counting": 61.67,
746
+ "Forensic Detection": 46.21,
747
+ "Functional Correspondence": 39.23,
748
+ "IQ Test": 28.67,
749
+ "Jigsaw": 65.77,
750
+ "Multi-view Reasoning": 43.61,
751
+ "Object Localization": 53.28,
752
+ "Relative Depth": 73.39,
753
+ "Relative Reflectance": 31.34,
754
+ "Semantic Correspondence": 46.76,
755
+ "Spatial Relation": 74.13,
756
+ "Visual Correspondence": 77.33,
757
+ "Visual Similarity": 84.44,
758
+ "acc_stderr": 0,
759
+ "acc": 57.42
760
+ }
761
+ }
762
+ }
Qwen-VL-Max/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -36,9 +36,41 @@
36
  "acc": 39.25
37
  },
38
  "MMMU_Pro_vision": {
39
- "accuracy": 31.79,
40
  "acc_stderr": 0,
41
- "acc": 31.79
42
  },
43
  "MmvetV2": {
44
  "reject_info": {
@@ -379,9 +411,9 @@
379
  "reject_number": 1,
380
  "total_question": 3040
381
  },
382
- "accuracy": 26.88,
383
  "acc_stderr": 0,
384
- "acc": 26.88
385
  },
386
  "CII-Bench": {
387
  "accuracy": 58.76,
 
36
  "acc": 39.25
37
  },
38
  "MMMU_Pro_vision": {
39
+ "accuracy": 41.16,
40
+ "subject_score": {
41
+ "History": 58.93,
42
+ "Art": 50.94,
43
+ "Design": 61.67,
44
+ "Literature": 61.54,
45
+ "Agriculture": 20.0,
46
+ "Finance": 50.0,
47
+ "Sociology": 46.3,
48
+ "Accounting": 44.83,
49
+ "Energy_and_Power": 18.97,
50
+ "Pharmacy": 47.37,
51
+ "Architecture_and_Engineering": 21.67,
52
+ "Clinical_Medicine": 33.9,
53
+ "Public_Health": 53.45,
54
+ "Physics": 31.67,
55
+ "Art_Theory": 63.64,
56
+ "Electronics": 43.33,
57
+ "Psychology": 46.67,
58
+ "Biology": 44.07,
59
+ "Manage": 26.0,
60
+ "Economics": 52.54,
61
+ "Mechanical_Engineering": 42.37,
62
+ "Diagnostics_and_Laboratory_Medicine": 30.0,
63
+ "Basic_Medical_Science": 48.08,
64
+ "Computer_Science": 35.0,
65
+ "Math": 36.67,
66
+ "Music": 38.33,
67
+ "Materials": 26.67,
68
+ "Marketing": 42.37,
69
+ "Chemistry": 33.33,
70
+ "Geography": 28.85
71
+ },
72
  "acc_stderr": 0,
73
+ "acc": 41.16
74
  },
75
  "MmvetV2": {
76
  "reject_info": {
 
411
  "reject_number": 1,
412
  "total_question": 3040
413
  },
414
+ "accuracy": 26.39,
415
  "acc_stderr": 0,
416
+ "acc": 26.39
417
  },
418
  "CII-Bench": {
419
  "accuracy": 58.76,
Qwen2-VL-2B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,410 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Qwen2-VL-2B-Instruct",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "accuracy": 33.89,
10
+ "acc_stderr": 0,
11
+ "acc": 33.89
12
+ },
13
+ "MMMU": {
14
+ "accuracy": 41.44,
15
+ "acc_stderr": 0,
16
+ "acc": 41.44
17
+ },
18
+ "MMMU_Pro_standard": {
19
+ "accuracy": 26.82,
20
+ "acc_stderr": 0,
21
+ "acc": 26.82
22
+ },
23
+ "MMMU_Pro_vision": {
24
+ "accuracy": 16.47,
25
+ "subject_score": {
26
+ "Sociology": 22.22,
27
+ "History": 16.07,
28
+ "Agriculture": 15.0,
29
+ "Design": 26.67,
30
+ "Energy_and_Power": 3.45,
31
+ "Finance": 8.33,
32
+ "Literature": 51.92,
33
+ "Pharmacy": 24.56,
34
+ "Accounting": 10.34,
35
+ "Architecture_and_Engineering": 8.33,
36
+ "Clinical_Medicine": 11.86,
37
+ "Art": 24.53,
38
+ "Art_Theory": 21.82,
39
+ "Public_Health": 6.9,
40
+ "Electronics": 13.33,
41
+ "Physics": 13.33,
42
+ "Psychology": 13.33,
43
+ "Manage": 26.0,
44
+ "Biology": 22.03,
45
+ "Mechanical_Engineering": 16.95,
46
+ "Economics": 11.86,
47
+ "Basic_Medical_Science": 15.38,
48
+ "Computer_Science": 25.0,
49
+ "Diagnostics_and_Laboratory_Medicine": 15.0,
50
+ "Math": 10.0,
51
+ "Materials": 16.67,
52
+ "Music": 11.67,
53
+ "Marketing": 10.17,
54
+ "Chemistry": 16.67,
55
+ "Geography": 11.54
56
+ },
57
+ "acc_stderr": 0,
58
+ "acc": 16.47
59
+ },
60
+ "MmvetV2": {
61
+ "accuracy": 45.2998,
62
+ "capability_scores": {
63
+ "ocr": 47.5,
64
+ "math": 30.58823529411765,
65
+ "spat": 41.065989847715734,
66
+ "rec": 43.05825242718446,
67
+ "know": 44.29487179487178,
68
+ "gen": 41.34545454545452,
69
+ "seq": 34.82142857142858
70
+ },
71
+ "capability_detail_scores": {
72
+ "ocr_math": 31.818181818181817,
73
+ "ocr_math_spat": 33.33333333333333,
74
+ "ocr_math_spat_rec": 25.0,
75
+ "spat_rec": 56.07142857142857,
76
+ "ocr_spat": 55.769230769230774,
77
+ "ocr_spat_rec": 21.666666666666668,
78
+ "ocr_know_spat": 75.0,
79
+ "ocr_rec": 65.0,
80
+ "know_spat_rec": 18.0,
81
+ "ocr": 70.93750000000001,
82
+ "rec": 55.08474576271186,
83
+ "know_rec": 41.53846153846154,
84
+ "gen_know_rec": 45.099999999999994,
85
+ "ocr_gen_know_rec": 43.84615384615385,
86
+ "ocr_gen_spat_rec": 38.372093023255815,
87
+ "ocr_spat_gen": 60.0,
88
+ "math_gen_seq_ocr_spat": 0.0,
89
+ "math_rec_seq_ocr_spat": 0.0,
90
+ "gen_spat_rec": 30.909090909090903,
91
+ "ocr_math_spat_gen": 40.0,
92
+ "seq_spat_rec": 40.0,
93
+ "seq_ocr_spat_rec": 33.33333333333333,
94
+ "gen_know_spat_rec": 20.0,
95
+ "gen_rec": 42.94117647058823,
96
+ "ocr_know_spat_rec": 87.5,
97
+ "gen_know_ocr_rec": 43.84615384615385,
98
+ "gen_rec_spat_ocr_know": 65.0,
99
+ "ocr_math_rec": 100.0,
100
+ "ocr_gen_rec": 34.0,
101
+ "seq_ocr_gen_rec": 52.85714285714285,
102
+ "ocr_gen": 50.0,
103
+ "seq_gen_rec": 22.857142857142858,
104
+ "seq_rec": 40.0,
105
+ "seq_gen_spat_rec": 42.5,
106
+ "seq_know_rec": 0.0,
107
+ "seq_gen_know_rec": 55.00000000000001,
108
+ "gen_rec_seq_ocr_spat": 10.0,
109
+ "gen_rec_seq_ocr_know": 80.0,
110
+ "math_know_rec": 0.0,
111
+ "seq_ocr_rec": 0.0
112
+ },
113
+ "acc_stderr": 0,
114
+ "acc": 45.2998
115
+ },
116
+ "MathVerse": {
117
+ "Text Lite": {
118
+ "accuracy": 21.32,
119
+ "correct": 168,
120
+ "total": 788
121
+ },
122
+ "Total": {
123
+ "accuracy": 20.94,
124
+ "correct": 825,
125
+ "total": 3940
126
+ },
127
+ "Text Dominant": {
128
+ "accuracy": 23.73,
129
+ "correct": 187,
130
+ "total": 788
131
+ },
132
+ "Vision Intensive": {
133
+ "accuracy": 20.56,
134
+ "correct": 162,
135
+ "total": 788
136
+ },
137
+ "Vision Only": {
138
+ "accuracy": 18.78,
139
+ "correct": 148,
140
+ "total": 788
141
+ },
142
+ "Vision Dominant": {
143
+ "accuracy": 20.3,
144
+ "correct": 160,
145
+ "total": 788
146
+ },
147
+ "accuracy": 20.94,
148
+ "acc_stderr": 0,
149
+ "acc": 20.94
150
+ },
151
+ "Ocrlite": {
152
+ "final_score": [
153
+ 1123,
154
+ 1645
155
+ ],
156
+ "accuracy": 68.267,
157
+ "Key Information Extraction-Bookshelf": [
158
+ 23,
159
+ 52
160
+ ],
161
+ "Scene Text-centric VQA-diet_constraints": [
162
+ 27,
163
+ 90
164
+ ],
165
+ "Doc-oriented VQA-Control": [
166
+ 111,
167
+ 189
168
+ ],
169
+ "Doc-oriented VQA": [
170
+ 128,
171
+ 204
172
+ ],
173
+ "Scene Text-centric VQA-Fake_logo": [
174
+ 47,
175
+ 119
176
+ ],
177
+ "Handwritten Mathematical Expression Recognition": [
178
+ 28,
179
+ 100
180
+ ],
181
+ "Key Information Extraction": [
182
+ 190,
183
+ 209
184
+ ],
185
+ "Scene Text-centric VQA-Control": [
186
+ 166,
187
+ 200
188
+ ],
189
+ "Scene Text-centric VQA": [
190
+ 229,
191
+ 282
192
+ ],
193
+ "Artistic Text Recognition": [
194
+ 43,
195
+ 50
196
+ ],
197
+ "Irregular Text Recognition": [
198
+ 47,
199
+ 50
200
+ ],
201
+ "Non-Semantic Text Recognition": [
202
+ 38,
203
+ 50
204
+ ],
205
+ "Regular Text Recognition": [
206
+ 46,
207
+ 50
208
+ ],
209
+ "acc_stderr": 0,
210
+ "acc": 68.267
211
+ },
212
+ "OcrliteZh": {
213
+ "final_score": [
214
+ 143,
215
+ 234
216
+ ],
217
+ "accuracy": 61.111,
218
+ "Docvqa": [
219
+ 8,
220
+ 10
221
+ ],
222
+ "Chartqa-human": [
223
+ 5,
224
+ 10
225
+ ],
226
+ "Chartqa-au": [
227
+ 6,
228
+ 10
229
+ ],
230
+ "infographic": [
231
+ 4,
232
+ 10
233
+ ],
234
+ "Key Information Extraction": [
235
+ 33,
236
+ 45
237
+ ],
238
+ "Scene Text-centric VQA": [
239
+ 27,
240
+ 40
241
+ ],
242
+ "Artistic Text Recognition": [
243
+ 6,
244
+ 11
245
+ ],
246
+ "IrRegular Text Recognition": [
247
+ 8,
248
+ 11
249
+ ],
250
+ "Non-semantic Text Recognition": [
251
+ 10,
252
+ 12
253
+ ],
254
+ "Regular Text Recognition": [
255
+ 11,
256
+ 11
257
+ ],
258
+ "Handwriting_CN": [
259
+ 12,
260
+ 20
261
+ ],
262
+ "Chinese Unlimited": [
263
+ 13,
264
+ 44
265
+ ],
266
+ "acc_stderr": 0,
267
+ "acc": 61.111
268
+ },
269
+ "CharXiv": {
270
+ "descriptive": {
271
+ "Overall Score": 45.25,
272
+ "By Question": {
273
+ "Q1": 66.8,
274
+ "Q2": 54.78,
275
+ "Q3": 49.79,
276
+ "Q4": 65.37,
277
+ "Q5": 55.23,
278
+ "Q6": 38.55,
279
+ "Q7": 22.22,
280
+ "Q8": 43.3,
281
+ "Q9": 29.35,
282
+ "Q10": 32.88,
283
+ "Q11": 24.57,
284
+ "Q12": 31.87,
285
+ "Q13": 45.66,
286
+ "Q14": 73.76,
287
+ "Q15": 50.8,
288
+ "Q16": 38.89,
289
+ "Q17": 6.25,
290
+ "Q18": 44.13,
291
+ "Q19": 73.85
292
+ },
293
+ "By Category": {
294
+ "Information Extraction": 50.59,
295
+ "Enumeration": 50.28,
296
+ "Pattern Recognition": 36.24,
297
+ "Counting": 39.19,
298
+ "Compositionality": 6.25
299
+ },
300
+ "By Subplot": {
301
+ "1 Subplot": 52.91,
302
+ "2-4 Subplots": 42.66,
303
+ "5+ Subplots": 36.86
304
+ },
305
+ "By Subject": {
306
+ "Computer Science": 41.67,
307
+ "Economics": 44.2,
308
+ "Electrical Engineering and Systems Science": 50.84,
309
+ "Mathematics": 48.52,
310
+ "Physics": 45.47,
311
+ "Quantitative Biology": 38.69,
312
+ "Quantitative Finance": 47.63,
313
+ "Statistics": 45.35
314
+ },
315
+ "By Year": {
316
+ "2020": 46.15,
317
+ "2021": 43.58,
318
+ "2022": 46.93,
319
+ "2023": 44.46
320
+ },
321
+ "N_valid": 4000,
322
+ "N_invalid": 0,
323
+ "Question Type": "Descriptive"
324
+ },
325
+ "reasoning": {
326
+ "Overall Score": 23.9,
327
+ "By Answer Type": {
328
+ "Text-in-Chart": 24.77,
329
+ "Text-in-General": 28.28,
330
+ "Number-in-Chart": 25.0,
331
+ "Number-in-General": 19.21
332
+ },
333
+ "By Source": {
334
+ "GPT-Sourced": 27.17,
335
+ "GPT-Inspired": 19.44,
336
+ "Completely Human": 24.5
337
+ },
338
+ "By Subject": {
339
+ "Computer Science": 28.57,
340
+ "Economics": 17.39,
341
+ "Electrical Engineering and Systems Science": 27.73,
342
+ "Mathematics": 25.19,
343
+ "Physics": 24.41,
344
+ "Quantitative Biology": 20.63,
345
+ "Quantitative Finance": 22.41,
346
+ "Statistics": 25.66
347
+ },
348
+ "By Year": {
349
+ "2020": 24.7,
350
+ "2021": 24.14,
351
+ "2022": 26.23,
352
+ "2023": 20.56
353
+ },
354
+ "By Subplot": {
355
+ "1 Subplot": 25.65,
356
+ "2-4 Subplots": 25.93,
357
+ "5+ Subplots": 17.8
358
+ },
359
+ "N_valid": 1000,
360
+ "N_invalid": 1,
361
+ "Question Type": "Reasoning"
362
+ },
363
+ "accuracy": 34.58,
364
+ "acc_stderr": 0,
365
+ "acc": 34.58
366
+ },
367
+ "MathVision": {
368
+ "accuracy": 14.11,
369
+ "acc_stderr": 0,
370
+ "acc": 14.11
371
+ },
372
+ "CII-Bench": {
373
+ "accuracy": 39.48,
374
+ "domain_score": {
375
+ "Art": 42.65,
376
+ "Env.": 50.0,
377
+ "CTC": 46.67,
378
+ "Society": 40.0,
379
+ "Life": 31.17,
380
+ "Politics": 33.33
381
+ },
382
+ "emotion_score": {
383
+ "Negative": 38.49,
384
+ "Positive": 39.74,
385
+ "Neutral": 40.23
386
+ },
387
+ "acc_stderr": 0,
388
+ "acc": 39.48
389
+ },
390
+ "Blink": {
391
+ "accuracy": 40.45,
392
+ "Art Style": 47.01,
393
+ "Counting": 55.83,
394
+ "Forensic Detection": 21.21,
395
+ "Functional Correspondence": 32.31,
396
+ "IQ Test": 16.0,
397
+ "Jigsaw": 52.67,
398
+ "Multi-view Reasoning": 33.08,
399
+ "Object Localization": 50.0,
400
+ "Relative Depth": 55.65,
401
+ "Relative Reflectance": 31.34,
402
+ "Semantic Correspondence": 24.46,
403
+ "Spatial Relation": 69.93,
404
+ "Visual Correspondence": 28.49,
405
+ "Visual Similarity": 55.56,
406
+ "acc_stderr": 0,
407
+ "acc": 40.45
408
+ }
409
+ }
410
+ }
Qwen2-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -26,9 +26,41 @@
26
  "reject_number": 4,
27
  "total_question": 1730
28
  },
29
- "accuracy": 34.41,
30
  "acc_stderr": 0,
31
- "acc": 34.41
32
  },
33
  "MmvetV2": {
34
  "accuracy": 65.4739,
@@ -337,9 +369,9 @@
337
  "acc": 60.06
338
  },
339
  "MathVision": {
340
- "accuracy": 26.35,
341
  "acc_stderr": 0,
342
- "acc": 26.35
343
  },
344
  "CII-Bench": {
345
  "accuracy": 67.84,
 
26
  "reject_number": 4,
27
  "total_question": 1730
28
  },
29
+ "accuracy": 34.47,
30
+ "subject_score": {
31
+ "Design": 56.67,
32
+ "Finance": 22.03,
33
+ "Sociology": 38.89,
34
+ "History": 44.64,
35
+ "Literature": 63.46,
36
+ "Art": 54.72,
37
+ "Agriculture": 16.67,
38
+ "Pharmacy": 43.86,
39
+ "Architecture_and_Engineering": 23.33,
40
+ "Public_Health": 25.86,
41
+ "Clinical_Medicine": 30.51,
42
+ "Accounting": 19.3,
43
+ "Energy_and_Power": 19.3,
44
+ "Art_Theory": 57.41,
45
+ "Psychology": 33.33,
46
+ "Electronics": 45.0,
47
+ "Physics": 26.67,
48
+ "Manage": 28.0,
49
+ "Biology": 25.42,
50
+ "Mechanical_Engineering": 33.9,
51
+ "Economics": 32.2,
52
+ "Basic_Medical_Science": 36.54,
53
+ "Diagnostics_and_Laboratory_Medicine": 31.67,
54
+ "Computer_Science": 38.33,
55
+ "Math": 26.67,
56
+ "Music": 30.0,
57
+ "Materials": 23.33,
58
+ "Marketing": 37.29,
59
+ "Chemistry": 38.33,
60
+ "Geography": 38.46
61
+ },
62
  "acc_stderr": 0,
63
+ "acc": 34.47
64
  },
65
  "MmvetV2": {
66
  "accuracy": 65.4739,
 
369
  "acc": 60.06
370
  },
371
  "MathVision": {
372
+ "accuracy": 26.15,
373
  "acc_stderr": 0,
374
+ "acc": 26.15
375
  },
376
  "CII-Bench": {
377
  "accuracy": 67.84,
Qwen2-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -21,9 +21,41 @@
21
  "acc": 33.93
22
  },
23
  "MMMU_Pro_vision": {
24
- "accuracy": 17.69,
  "acc_stderr": 0,
26
- "acc": 17.69
27
  },
28
  "MmvetV2": {
29
  "accuracy": 56.8665,
@@ -334,9 +366,9 @@
334
  "acc": 46.02
335
  },
336
  "MathVision": {
337
- "accuracy": 17.34,
338
  "acc_stderr": 0,
339
- "acc": 17.34
340
  },
341
  "CII-Bench": {
342
  "accuracy": 51.24,
 
21
  "acc": 33.93
22
  },
23
  "MMMU_Pro_vision": {
24
+ "accuracy": 25.38,
25
+ "subject_score": {
26
+ "Design": 50.0,
27
+ "History": 33.93,
28
+ "Agriculture": 11.67,
29
+ "Sociology": 27.78,
30
+ "Finance": 15.0,
31
+ "Art": 39.62,
32
+ "Pharmacy": 24.56,
33
+ "Accounting": 8.62,
34
+ "Energy_and_Power": 22.41,
35
+ "Literature": 57.69,
36
+ "Clinical_Medicine": 13.56,
37
+ "Architecture_and_Engineering": 23.33,
38
+ "Art_Theory": 41.82,
39
+ "Public_Health": 12.07,
40
+ "Electronics": 28.33,
41
+ "Physics": 20.0,
42
+ "Psychology": 21.67,
43
+ "Biology": 28.81,
44
+ "Manage": 26.0,
45
+ "Economics": 22.03,
46
+ "Mechanical_Engineering": 32.2,
47
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
48
+ "Basic_Medical_Science": 28.85,
49
+ "Computer_Science": 31.67,
50
+ "Math": 20.0,
51
+ "Music": 25.0,
52
+ "Materials": 15.0,
53
+ "Chemistry": 18.33,
54
+ "Marketing": 15.25,
55
+ "Geography": 25.0
56
+ },
57
  "acc_stderr": 0,
58
+ "acc": 25.38
59
  },
60
  "MmvetV2": {
61
  "accuracy": 56.8665,
 
366
  "acc": 46.02
367
  },
368
  "MathVision": {
369
+ "accuracy": 17.04,
370
  "acc_stderr": 0,
371
+ "acc": 17.04
372
  },
373
  "CII-Bench": {
374
  "accuracy": 51.24,
Qwen2.5-VL-32B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,717 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Qwen2.5-VL-32B-Instruct",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 65,
12
+ "accuracy": 73.86
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 443,
17
+ "accuracy": 49.22
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 39,
22
+ "accuracy": 30.95
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 85,
27
+ "accuracy": 41.67
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 82,
32
+ "accuracy": 53.59
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 55,
37
+ "accuracy": 64.71
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 117,
42
+ "accuracy": 47.95
43
+ },
44
+ "accuracy": 49.22,
45
+ "acc_stderr": 0,
46
+ "acc": 49.22
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 61.0,
50
+ "subject_score": {
51
+ "Accounting": 53.33,
52
+ "Agriculture": 56.67,
53
+ "Architecture": 33.33,
54
+ "Art": 75.0,
55
+ "Basic": 66.67,
56
+ "Biology": 63.33,
57
+ "Chemistry": 33.33,
58
+ "Clinical": 70.0,
59
+ "Computer": 63.33,
60
+ "Design": 83.33,
61
+ "Diagnostics": 46.67,
62
+ "Economics": 63.33,
63
+ "Electronics": 40.0,
64
+ "Energy": 50.0,
65
+ "Finance": 36.67,
66
+ "Geography": 66.67,
67
+ "History": 80.0,
68
+ "Literature": 83.33,
69
+ "Manage": 53.33,
70
+ "Marketing": 76.67,
71
+ "Materials": 50.0,
72
+ "Math": 50.0,
73
+ "Mechanical": 43.33,
74
+ "Music": 53.33,
75
+ "Pharmacy": 73.33,
76
+ "Physics": 73.33,
77
+ "Psychology": 73.33,
78
+ "Public": 70.0,
79
+ "Sociology": 73.33
80
+ },
81
+ "difficulty_score": {
82
+ "Hard": 44.2,
83
+ "Medium": 60.14,
84
+ "Easy": 72.54
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 61.0
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 44.34,
91
+ "subject_score": {
92
+ "Literature": 69.23,
93
+ "Finance": 36.67,
94
+ "Design": 68.33,
95
+ "History": 62.5,
96
+ "Sociology": 50.0,
97
+ "Art": 60.38,
98
+ "Energy_and_Power": 20.69,
99
+ "Agriculture": 23.33,
100
+ "Accounting": 31.03,
101
+ "Clinical_Medicine": 49.15,
102
+ "Pharmacy": 45.61,
103
+ "Architecture_and_Engineering": 40.0,
104
+ "Public_Health": 43.1,
105
+ "Physics": 35.0,
106
+ "Electronics": 60.0,
107
+ "Art_Theory": 67.27,
108
+ "Psychology": 51.67,
109
+ "Manage": 42.0,
110
+ "Biology": 42.37,
111
+ "Economics": 47.46,
112
+ "Diagnostics_and_Laboratory_Medicine": 36.67,
113
+ "Mechanical_Engineering": 44.07,
114
+ "Basic_Medical_Science": 44.23,
115
+ "Computer_Science": 48.33,
116
+ "Math": 36.67,
117
+ "Music": 33.33,
118
+ "Materials": 26.67,
119
+ "Marketing": 37.29,
120
+ "Chemistry": 36.67,
121
+ "Geography": 48.08
122
+ },
123
+ "difficulty_score": {
124
+ "Easy": 54.92,
125
+ "Hard": 35.66,
126
+ "Medium": 41.7
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 44.34
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 35.55,
133
+ "subject_score": {
134
+ "History": 48.21,
135
+ "Sociology": 44.44,
136
+ "Design": 48.33,
137
+ "Accounting": 24.14,
138
+ "Energy_and_Power": 13.79,
139
+ "Literature": 67.31,
140
+ "Agriculture": 20.0,
141
+ "Art": 45.28,
142
+ "Pharmacy": 50.88,
143
+ "Finance": 33.33,
144
+ "Clinical_Medicine": 40.68,
145
+ "Architecture_and_Engineering": 23.33,
146
+ "Physics": 38.33,
147
+ "Art_Theory": 49.09,
148
+ "Electronics": 40.0,
149
+ "Psychology": 31.67,
150
+ "Manage": 28.0,
151
+ "Biology": 28.81,
152
+ "Economics": 37.29,
153
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
154
+ "Mechanical_Engineering": 22.03,
155
+ "Basic_Medical_Science": 34.62,
156
+ "Computer_Science": 46.67,
157
+ "Math": 21.67,
158
+ "Public_Health": 43.1,
159
+ "Materials": 30.0,
160
+ "Music": 20.0,
161
+ "Geography": 36.54,
162
+ "Marketing": 35.59,
163
+ "Chemistry": 41.67
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 35.55
167
+ },
168
+ "MmvetV2": {
169
+ "accuracy": 70.7157,
170
+ "capability_scores": {
171
+ "ocr": 81.00961538461544,
172
+ "math": 84.11764705882352,
173
+ "spat": 69.59390862944163,
174
+ "rec": 66.48058252427191,
175
+ "know": 64.35897435897436,
176
+ "gen": 68.87272727272735,
177
+ "seq": 65.35714285714286
178
+ },
179
+ "capability_detail_scores": {
180
+ "ocr_math": 88.18181818181817,
181
+ "ocr_math_spat": 89.33333333333333,
182
+ "rec_ocr_math_spat": 50.0,
183
+ "rec_spat": 63.21428571428571,
184
+ "ocr_spat": 84.61538461538461,
185
+ "rec_ocr_spat": 50.0,
186
+ "know_ocr_spat": 100.0,
187
+ "rec_ocr": 100.0,
188
+ "know_rec_spat": 50.0,
189
+ "ocr": 89.68749999999999,
190
+ "rec": 69.15254237288137,
191
+ "know_rec": 64.61538461538461,
192
+ "know_rec_gen": 61.699999999999974,
193
+ "know_rec_ocr_gen": 84.6153846153846,
194
+ "rec_ocr_gen_spat": 81.62790697674417,
195
+ "ocr_gen_spat": 95.0,
196
+ "gen_spat_math_ocr_seq": 100.0,
197
+ "rec_spat_math_ocr_seq": 100.0,
198
+ "rec_gen_spat": 54.54545454545454,
199
+ "ocr_math_gen_spat": 50.0,
200
+ "rec_seq_spat": 65.71428571428571,
201
+ "rec_seq_ocr_spat": 16.666666666666664,
202
+ "know_rec_gen_spat": 40.0,
203
+ "rec_gen": 70.29411764705883,
204
+ "know_rec_ocr_spat": 47.5,
205
+ "rec_gen_spat_know_ocr": 90.0,
206
+ "rec_ocr_math": 100.0,
207
+ "rec_ocr_gen": 96.0,
208
+ "rec_seq_ocr_gen": 72.85714285714286,
209
+ "ocr_gen": 80.76923076923077,
210
+ "rec_seq_gen": 64.28571428571429,
211
+ "rec_seq": 58.333333333333336,
212
+ "rec_seq_gen_spat": 73.75,
213
+ "know_rec_seq": 100.0,
214
+ "know_rec_seq_gen": 70.0,
215
+ "rec_gen_spat_ocr_seq": 20.0,
216
+ "rec_gen_know_ocr_seq": 100.0,
217
+ "know_rec_math": 50.0,
218
+ "rec_seq_ocr": 100.0
219
+ },
220
+ "acc_stderr": 0,
221
+ "acc": 70.7157
222
+ },
223
+ "MathVerse": {
224
+ "Text Lite": {
225
+ "accuracy": 54.95,
226
+ "correct": 433,
227
+ "total": 788
228
+ },
229
+ "Total": {
230
+ "accuracy": 53.27,
231
+ "correct": 2099,
232
+ "total": 3940
233
+ },
234
+ "Text Dominant": {
235
+ "accuracy": 62.44,
236
+ "correct": 492,
237
+ "total": 788
238
+ },
239
+ "Vision Intensive": {
240
+ "accuracy": 49.87,
241
+ "correct": 393,
242
+ "total": 788
243
+ },
244
+ "Vision Dominant": {
245
+ "accuracy": 50.13,
246
+ "correct": 395,
247
+ "total": 788
248
+ },
249
+ "Vision Only": {
250
+ "accuracy": 48.98,
251
+ "correct": 386,
252
+ "total": 788
253
+ },
254
+ "accuracy": 53.27,
255
+ "acc_stderr": 0,
256
+ "acc": 53.27
257
+ },
258
+ "Ocrlite": {
259
+ "final_score": [
260
+ 1275,
261
+ 1644
262
+ ],
263
+ "accuracy": 77.555,
264
+ "Key Information Extraction-Bookshelf": [
265
+ 32,
266
+ 51,
267
+ 0.627,
268
+ {
269
+ "Default": [
270
+ 32,
271
+ 51,
272
+ 0.627
273
+ ]
274
+ }
275
+ ],
276
+ "Scene Text-centric VQA-diet_constraints": [
277
+ 75,
278
+ 90,
279
+ 0.833,
280
+ {
281
+ "Default": [
282
+ 75,
283
+ 90,
284
+ 0.833
285
+ ]
286
+ }
287
+ ],
288
+ "Doc-oriented VQA-Control": [
289
+ 152,
290
+ 189,
291
+ 0.804,
292
+ {
293
+ "Default": [
294
+ 152,
295
+ 189,
296
+ 0.804
297
+ ]
298
+ }
299
+ ],
300
+ "Doc-oriented VQA": [
301
+ 176,
302
+ 204,
303
+ 0.863,
304
+ {
305
+ "Default": [
306
+ 176,
307
+ 204,
308
+ 0.863
309
+ ]
310
+ }
311
+ ],
312
+ "Scene Text-centric VQA-Fake_logo": [
313
+ 61,
314
+ 119,
315
+ 0.513,
316
+ {
317
+ "Default": [
318
+ 61,
319
+ 119,
320
+ 0.513
321
+ ]
322
+ }
323
+ ],
324
+ "Handwritten Mathematical Expression Recognition": [
325
+ 1,
326
+ 100,
327
+ 0.01,
328
+ {
329
+ "Default": [
330
+ 1,
331
+ 100,
332
+ 0.01
333
+ ]
334
+ }
335
+ ],
336
+ "Key Information Extraction": [
337
+ 194,
338
+ 209,
339
+ 0.928,
340
+ {
341
+ "Default": [
342
+ 194,
343
+ 209,
344
+ 0.928
345
+ ]
346
+ }
347
+ ],
348
+ "Scene Text-centric VQA-Control": [
349
+ 170,
350
+ 200,
351
+ 0.85,
352
+ {
353
+ "Default": [
354
+ 170,
355
+ 200,
356
+ 0.85
357
+ ]
358
+ }
359
+ ],
360
+ "Scene Text-centric VQA": [
361
+ 249,
362
+ 282,
363
+ 0.883,
364
+ {
365
+ "Default": [
366
+ 249,
367
+ 282,
368
+ 0.883
369
+ ]
370
+ }
371
+ ],
372
+ "Artistic Text Recognition": [
373
+ 41,
374
+ 50,
375
+ 0.82,
376
+ {
377
+ "Default": [
378
+ 41,
379
+ 50,
380
+ 0.82
381
+ ]
382
+ }
383
+ ],
384
+ "Irregular Text Recognition": [
385
+ 42,
386
+ 50,
387
+ 0.84,
388
+ {
389
+ "Default": [
390
+ 42,
391
+ 50,
392
+ 0.84
393
+ ]
394
+ }
395
+ ],
396
+ "Non-Semantic Text Recognition": [
397
+ 34,
398
+ 50,
399
+ 0.68,
400
+ {
401
+ "Default": [
402
+ 34,
403
+ 50,
404
+ 0.68
405
+ ]
406
+ }
407
+ ],
408
+ "Regular Text Recognition": [
409
+ 48,
410
+ 50,
411
+ 0.96,
412
+ {
413
+ "Default": [
414
+ 48,
415
+ 50,
416
+ 0.96
417
+ ]
418
+ }
419
+ ],
420
+ "acc_stderr": 0,
421
+ "acc": 77.555
422
+ },
423
+ "OcrliteZh": {
424
+ "final_score": [
425
+ 168,
426
+ 234
427
+ ],
428
+ "accuracy": 71.795,
429
+ "Docvqa": [
430
+ 8,
431
+ 10,
432
+ 0.8,
433
+ {
434
+ "Default": [
435
+ 8,
436
+ 10,
437
+ 0.8
438
+ ]
439
+ }
440
+ ],
441
+ "Chartqa-human": [
442
+ 8,
443
+ 10,
444
+ 0.8,
445
+ {
446
+ "Default": [
447
+ 8,
448
+ 10,
449
+ 0.8
450
+ ]
451
+ }
452
+ ],
453
+ "Chartqa-au": [
454
+ 8,
455
+ 10,
456
+ 0.8,
457
+ {
458
+ "Default": [
459
+ 8,
460
+ 10,
461
+ 0.8
462
+ ]
463
+ }
464
+ ],
465
+ "infographic": [
466
+ 8,
467
+ 10,
468
+ 0.8,
469
+ {
470
+ "Default": [
471
+ 8,
472
+ 10,
473
+ 0.8
474
+ ]
475
+ }
476
+ ],
477
+ "Key Information Extraction": [
478
+ 40,
479
+ 45,
480
+ 0.889,
481
+ {
482
+ "Default": [
483
+ 40,
484
+ 45,
485
+ 0.889
486
+ ]
487
+ }
488
+ ],
489
+ "Scene Text-centric VQA": [
490
+ 28,
491
+ 40,
492
+ 0.7,
493
+ {
494
+ "Default": [
495
+ 28,
496
+ 40,
497
+ 0.7
498
+ ]
499
+ }
500
+ ],
501
+ "Artistic Text Recognition": [
502
+ 4,
503
+ 11,
504
+ 0.364,
505
+ {
506
+ "Default": [
507
+ 4,
508
+ 11,
509
+ 0.364
510
+ ]
511
+ }
512
+ ],
513
+ "IrRegular Text Recognition": [
514
+ 7,
515
+ 11,
516
+ 0.636,
517
+ {
518
+ "Default": [
519
+ 7,
520
+ 11,
521
+ 0.636
522
+ ]
523
+ }
524
+ ],
525
+ "Non-semantic Text Recognition": [
526
+ 11,
527
+ 12,
528
+ 0.917,
529
+ {
530
+ "Default": [
531
+ 11,
532
+ 12,
533
+ 0.917
534
+ ]
535
+ }
536
+ ],
537
+ "Regular Text Recognition": [
538
+ 10,
539
+ 11,
540
+ 0.909,
541
+ {
542
+ "Default": [
543
+ 10,
544
+ 11,
545
+ 0.909
546
+ ]
547
+ }
548
+ ],
549
+ "Handwriting_CN": [
550
+ 13,
551
+ 20,
552
+ 0.65,
553
+ {
554
+ "Default": [
555
+ 13,
556
+ 20,
557
+ 0.65
558
+ ]
559
+ }
560
+ ],
561
+ "Chinese Unlimited": [
562
+ 23,
563
+ 44,
564
+ 0.523,
565
+ {
566
+ "Default": [
567
+ 23,
568
+ 44,
569
+ 0.523
570
+ ]
571
+ }
572
+ ],
573
+ "acc_stderr": 0,
574
+ "acc": 71.795
575
+ },
576
+ "CharXiv": {
577
+ "descriptive": {
578
+ "Overall Score": 81.6,
579
+ "By Question": {
580
+ "Q1": 87.3,
581
+ "Q2": 85.22,
582
+ "Q3": 71.67,
583
+ "Q4": 89.11,
584
+ "Q5": 89.12,
585
+ "Q6": 83.13,
586
+ "Q7": 80.34,
587
+ "Q8": 91.96,
588
+ "Q9": 76.62,
589
+ "Q10": 78.08,
590
+ "Q11": 50.29,
591
+ "Q12": 75.27,
592
+ "Q13": 70.78,
593
+ "Q14": 87.94,
594
+ "Q15": 94.25,
595
+ "Q16": 75.0,
596
+ "Q17": 65.18,
597
+ "Q18": 89.88,
598
+ "Q19": 90.77
599
+ },
600
+ "By Category": {
601
+ "Information Extraction": 83.81,
602
+ "Enumeration": 85.39,
603
+ "Pattern Recognition": 73.58,
604
+ "Counting": 78.88,
605
+ "Compositionality": 65.18
606
+ },
607
+ "By Subplot": {
608
+ "1 Subplot": 86.01,
609
+ "2-4 Subplots": 81.94,
610
+ "5+ Subplots": 73.83
611
+ },
612
+ "By Subject": {
613
+ "Computer Science": 82.14,
614
+ "Economics": 83.7,
615
+ "Electrical Engineering and Systems Science": 85.92,
616
+ "Mathematics": 83.7,
617
+ "Physics": 76.77,
618
+ "Quantitative Biology": 76.19,
619
+ "Quantitative Finance": 80.39,
620
+ "Statistics": 84.07
621
+ },
622
+ "By Year": {
623
+ "2020": 81.98,
624
+ "2021": 79.6,
625
+ "2022": 83.61,
626
+ "2023": 81.35
627
+ },
628
+ "N_valid": 4000,
629
+ "N_invalid": 5,
630
+ "Question Type": "Descriptive"
631
+ },
632
+ "reasoning": {
633
+ "Overall Score": 42.7,
634
+ "By Answer Type": {
635
+ "Text-in-Chart": 45.45,
636
+ "Text-in-General": 43.43,
637
+ "Number-in-Chart": 44.4,
638
+ "Number-in-General": 35.37
639
+ },
640
+ "By Source": {
641
+ "GPT-Sourced": 50.0,
642
+ "GPT-Inspired": 41.67,
643
+ "Completely Human": 40.83
644
+ },
645
+ "By Subject": {
646
+ "Computer Science": 42.06,
647
+ "Economics": 43.48,
648
+ "Electrical Engineering and Systems Science": 42.86,
649
+ "Mathematics": 40.0,
650
+ "Physics": 51.18,
651
+ "Quantitative Biology": 42.06,
652
+ "Quantitative Finance": 37.93,
653
+ "Statistics": 41.59
654
+ },
655
+ "By Year": {
656
+ "2020": 40.08,
657
+ "2021": 41.38,
658
+ "2022": 36.48,
659
+ "2023": 52.82
660
+ },
661
+ "By Subplot": {
662
+ "1 Subplot": 45.6,
663
+ "2-4 Subplots": 42.33,
664
+ "5+ Subplots": 38.56
665
+ },
666
+ "N_valid": 1000,
667
+ "N_invalid": 1,
668
+ "Question Type": "Reasoning"
669
+ },
670
+ "accuracy": 62.15,
671
+ "acc_stderr": 0,
672
+ "acc": 62.15
673
+ },
674
+ "MathVision": {
675
+ "accuracy": 31.18,
676
+ "acc_stderr": 0,
677
+ "acc": 31.18
678
+ },
679
+ "CII-Bench": {
680
+ "accuracy": 63.01,
681
+ "domain_score": {
682
+ "Art": 62.5,
683
+ "Life": 60.17,
684
+ "Env.": 79.63,
685
+ "Society": 65.95,
686
+ "CTC": 58.52,
687
+ "Politics": 58.33
688
+ },
689
+ "emotion_score": {
690
+ "Negative": 64.91,
691
+ "Neutral": 60.15,
692
+ "Positive": 64.1
693
+ },
694
+ "acc_stderr": 0,
695
+ "acc": 63.01
696
+ },
697
+ "Blink": {
698
+ "accuracy": 58.97,
699
+ "Art Style": 76.07,
700
+ "Counting": 65.0,
701
+ "Forensic Detection": 58.33,
702
+ "Functional Correspondence": 29.23,
703
+ "IQ Test": 30.67,
704
+ "Jigsaw": 69.33,
705
+ "Multi-view Reasoning": 48.87,
706
+ "Object Localization": 56.56,
707
+ "Relative Depth": 74.19,
708
+ "Relative Reflectance": 28.36,
709
+ "Semantic Correspondence": 51.8,
710
+ "Spatial Relation": 86.01,
711
+ "Visual Correspondence": 72.09,
712
+ "Visual Similarity": 78.52,
713
+ "acc_stderr": 0,
714
+ "acc": 58.97
715
+ }
716
+ }
717
+ }
Qwen2.5-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,723 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Qwen2.5-VL-72B-Instruct",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 66,
12
+ "accuracy": 75.0
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 509,
17
+ "accuracy": 56.56
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 46,
22
+ "accuracy": 36.51
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 112,
27
+ "accuracy": 54.9
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 95,
32
+ "accuracy": 62.09
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 60,
37
+ "accuracy": 70.59
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 130,
42
+ "accuracy": 53.28
43
+ },
44
+ "accuracy": 56.56,
45
+ "acc_stderr": 0,
46
+ "acc": 56.56
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 62.56,
50
+ "subject_score": {
51
+ "Accounting": 66.67,
52
+ "Agriculture": 53.33,
53
+ "Architecture": 33.33,
54
+ "Art": 81.67,
55
+ "Basic": 66.67,
56
+ "Biology": 60.0,
57
+ "Chemistry": 46.67,
58
+ "Clinical": 70.0,
59
+ "Computer": 63.33,
60
+ "Design": 83.33,
61
+ "Diagnostics": 43.33,
62
+ "Economics": 76.67,
63
+ "Electronics": 40.0,
64
+ "Energy": 50.0,
65
+ "Finance": 46.67,
66
+ "Geography": 73.33,
67
+ "History": 76.67,
68
+ "Literature": 90.0,
69
+ "Manage": 60.0,
70
+ "Marketing": 66.67,
71
+ "Materials": 40.0,
72
+ "Math": 56.67,
73
+ "Mechanical": 46.67,
74
+ "Music": 33.33,
75
+ "Pharmacy": 70.0,
76
+ "Physics": 66.67,
77
+ "Psychology": 80.0,
78
+ "Public": 83.33,
79
+ "Sociology": 70.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 61.79,
83
+ "Hard": 44.75,
84
+ "Easy": 74.58
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 62.56
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 46.94,
91
+ "subject_score": {
92
+ "History": 60.71,
93
+ "Design": 66.67,
94
+ "Finance": 43.33,
95
+ "Literature": 73.08,
96
+ "Sociology": 51.85,
97
+ "Art": 73.58,
98
+ "Agriculture": 33.33,
99
+ "Energy_and_Power": 25.86,
100
+ "Accounting": 31.03,
101
+ "Architecture_and_Engineering": 33.33,
102
+ "Pharmacy": 57.89,
103
+ "Public_Health": 43.1,
104
+ "Clinical_Medicine": 42.37,
105
+ "Art_Theory": 74.55,
106
+ "Physics": 40.0,
107
+ "Electronics": 65.0,
108
+ "Psychology": 45.0,
109
+ "Economics": 54.24,
110
+ "Biology": 45.76,
111
+ "Mechanical_Engineering": 44.07,
112
+ "Manage": 40.0,
113
+ "Diagnostics_and_Laboratory_Medicine": 33.33,
114
+ "Basic_Medical_Science": 50.0,
115
+ "Computer_Science": 50.0,
116
+ "Math": 41.67,
117
+ "Materials": 28.33,
118
+ "Music": 30.0,
119
+ "Marketing": 45.76,
120
+ "Chemistry": 45.0,
121
+ "Geography": 48.08
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 42.2,
125
+ "Hard": 37.66,
126
+ "Easy": 61.17
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 46.94
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 46.3,
133
+ "subject_score": {
134
+ "Design": 61.67,
135
+ "History": 58.93,
136
+ "Art": 52.83,
137
+ "Literature": 69.23,
138
+ "Sociology": 55.56,
139
+ "Pharmacy": 50.88,
140
+ "Agriculture": 21.67,
141
+ "Clinical_Medicine": 40.68,
142
+ "Accounting": 60.34,
143
+ "Energy_and_Power": 39.66,
144
+ "Architecture_and_Engineering": 35.0,
145
+ "Public_Health": 62.07,
146
+ "Physics": 45.0,
147
+ "Art_Theory": 58.18,
148
+ "Finance": 61.67,
149
+ "Manage": 40.0,
150
+ "Psychology": 43.33,
151
+ "Biology": 32.2,
152
+ "Diagnostics_and_Laboratory_Medicine": 25.0,
153
+ "Economics": 61.02,
154
+ "Electronics": 56.67,
155
+ "Basic_Medical_Science": 42.31,
156
+ "Mechanical_Engineering": 38.98,
157
+ "Computer_Science": 45.0,
158
+ "Math": 43.33,
159
+ "Music": 30.0,
160
+ "Materials": 28.33,
161
+ "Marketing": 50.85,
162
+ "Chemistry": 46.67,
163
+ "Geography": 36.54
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 46.3
167
+ },
168
+ "MmvetV2": {
169
+ "reject_info": {
170
+ "reject_rate": 1.35,
171
+ "reject_number": 7,
172
+ "total_question": 517
173
+ },
174
+ "accuracy": 69.1765,
175
+ "capability_scores": {
176
+ "math": 76.47058823529412,
177
+ "ocr": 72.58536585365856,
178
+ "spat": 65.20408163265303,
179
+ "rec": 67.0617283950618,
180
+ "know": 65.32051282051277,
181
+ "gen": 69.1143911439115,
182
+ "seq": 72.24489795918367
183
+ },
184
+ "capability_detail_scores": {
185
+ "math_ocr": 72.72727272727273,
186
+ "spat_ocr_math": 93.33333333333333,
187
+ "rec_spat_ocr_math": 50.0,
188
+ "rec_spat": 61.78571428571429,
189
+ "spat_ocr": 64.61538461538461,
190
+ "rec_spat_ocr": 50.0,
191
+ "know_spat_ocr": 100.0,
192
+ "rec_ocr": 95.0,
193
+ "rec_know_spat": 48.0,
194
+ "ocr": 84.37500000000001,
195
+ "rec": 67.28813559322035,
196
+ "rec_know": 78.46153846153847,
197
+ "rec_know_gen": 68.19999999999997,
198
+ "rec_know_ocr_gen": 69.23076923076923,
199
+ "rec_spat_ocr_gen": 74.41860465116281,
200
+ "spat_ocr_gen": 80.0,
201
+ "spat_math_ocr_seq_gen": 100.0,
202
+ "spat_math_ocr_seq_rec": 100.0,
203
+ "rec_spat_gen": 62.27272727272729,
204
+ "spat_ocr_gen_math": 0.0,
205
+ "rec_spat_seq": 65.0,
206
+ "rec_spat_ocr_seq": 33.33333333333333,
207
+ "rec_know_spat_gen": 36.66666666666667,
208
+ "rec_gen": 70.58823529411767,
209
+ "rec_know_spat_ocr": 0.0,
210
+ "know_spat_ocr_gen_rec": 50.0,
211
+ "rec_math_ocr": 100.0,
212
+ "rec_ocr_gen": 88.00000000000001,
213
+ "rec_seq_ocr_gen": 75.0,
214
+ "ocr_gen": 67.6923076923077,
215
+ "rec_seq_gen": 71.53846153846153,
216
+ "rec_seq": 100.0,
217
+ "rec_spat_seq_gen": 75.0,
218
+ "rec_know_seq": 100.0,
219
+ "rec_gen_seq": 71.53846153846153,
220
+ "rec_know_seq_gen": 40.0,
221
+ "spat_ocr_seq_rec_gen": 53.333333333333336,
222
+ "know_ocr_seq_rec_gen": 90.0,
223
+ "rec_know_math": 0.0,
224
+ "rec_ocr_seq": 100.0
225
+ },
226
+ "acc_stderr": 0,
227
+ "acc": 69.1765
228
+ },
229
+ "MathVerse": {
230
+ "Vision Intensive": {
231
+ "accuracy": 35.28,
232
+ "correct": 278,
233
+ "total": 788
234
+ },
235
+ "Total": {
236
+ "accuracy": 37.31,
237
+ "correct": 1470,
238
+ "total": 3940
239
+ },
240
+ "Text Lite": {
241
+ "accuracy": 37.94,
242
+ "correct": 299,
243
+ "total": 788
244
+ },
245
+ "Text Dominant": {
246
+ "accuracy": 43.78,
247
+ "correct": 345,
248
+ "total": 788
249
+ },
250
+ "Vision Dominant": {
251
+ "accuracy": 36.68,
252
+ "correct": 289,
253
+ "total": 788
254
+ },
255
+ "Vision Only": {
256
+ "accuracy": 32.87,
257
+ "correct": 259,
258
+ "total": 788
259
+ },
260
+ "accuracy": 37.31,
261
+ "acc_stderr": 0,
262
+ "acc": 37.31
263
+ },
264
+ "Ocrlite": {
265
+ "final_score": [
266
+ 1306,
267
+ 1644
268
+ ],
269
+ "accuracy": 79.44,
270
+ "Key Information Extraction-Bookshelf": [
271
+ 40,
272
+ 51,
273
+ 0.784,
274
+ {
275
+ "Default": [
276
+ 40,
277
+ 51,
278
+ 0.784
279
+ ]
280
+ }
281
+ ],
282
+ "Scene Text-centric VQA-diet_constraints": [
283
+ 71,
284
+ 90,
285
+ 0.789,
286
+ {
287
+ "Default": [
288
+ 71,
289
+ 90,
290
+ 0.789
291
+ ]
292
+ }
293
+ ],
294
+ "Doc-oriented VQA-Control": [
295
+ 147,
296
+ 189,
297
+ 0.778,
298
+ {
299
+ "Default": [
300
+ 147,
301
+ 189,
302
+ 0.778
303
+ ]
304
+ }
305
+ ],
306
+ "Doc-oriented VQA": [
307
+ 170,
308
+ 204,
309
+ 0.833,
310
+ {
311
+ "Default": [
312
+ 170,
313
+ 204,
314
+ 0.833
315
+ ]
316
+ }
317
+ ],
318
+ "Scene Text-centric VQA-Fake_logo": [
319
+ 74,
320
+ 119,
321
+ 0.622,
322
+ {
323
+ "Default": [
324
+ 74,
325
+ 119,
326
+ 0.622
327
+ ]
328
+ }
329
+ ],
330
+ "Handwritten Mathematical Expression Recognition": [
331
+ 1,
332
+ 100,
333
+ 0.01,
334
+ {
335
+ "Default": [
336
+ 1,
337
+ 100,
338
+ 0.01
339
+ ]
340
+ }
341
+ ],
342
+ "Key Information Extraction": [
343
+ 200,
344
+ 209,
345
+ 0.957,
346
+ {
347
+ "Default": [
348
+ 200,
349
+ 209,
350
+ 0.957
351
+ ]
352
+ }
353
+ ],
354
+ "Scene Text-centric VQA-Control": [
355
+ 172,
356
+ 200,
357
+ 0.86,
358
+ {
359
+ "Default": [
360
+ 172,
361
+ 200,
362
+ 0.86
363
+ ]
364
+ }
365
+ ],
366
+ "Scene Text-centric VQA": [
367
+ 248,
368
+ 282,
369
+ 0.879,
370
+ {
371
+ "Default": [
372
+ 248,
373
+ 282,
374
+ 0.879
375
+ ]
376
+ }
377
+ ],
378
+ "Artistic Text Recognition": [
379
+ 44,
380
+ 50,
381
+ 0.88,
382
+ {
383
+ "Default": [
384
+ 44,
385
+ 50,
386
+ 0.88
387
+ ]
388
+ }
389
+ ],
390
+ "Irregular Text Recognition": [
391
+ 47,
392
+ 50,
393
+ 0.94,
394
+ {
395
+ "Default": [
396
+ 47,
397
+ 50,
398
+ 0.94
399
+ ]
400
+ }
401
+ ],
402
+ "Non-Semantic Text Recognition": [
403
+ 43,
404
+ 50,
405
+ 0.86,
406
+ {
407
+ "Default": [
408
+ 43,
409
+ 50,
410
+ 0.86
411
+ ]
412
+ }
413
+ ],
414
+ "Regular Text Recognition": [
415
+ 49,
416
+ 50,
417
+ 0.98,
418
+ {
419
+ "Default": [
420
+ 49,
421
+ 50,
422
+ 0.98
423
+ ]
424
+ }
425
+ ],
426
+ "acc_stderr": 0,
427
+ "acc": 79.44
428
+ },
429
+ "OcrliteZh": {
430
+ "final_score": [
431
+ 175,
432
+ 234
433
+ ],
434
+ "accuracy": 74.786,
435
+ "Docvqa": [
436
+ 7,
437
+ 10,
438
+ 0.7,
439
+ {
440
+ "Default": [
441
+ 7,
442
+ 10,
443
+ 0.7
444
+ ]
445
+ }
446
+ ],
447
+ "Chartqa-human": [
448
+ 5,
449
+ 10,
450
+ 0.5,
451
+ {
452
+ "Default": [
453
+ 5,
454
+ 10,
455
+ 0.5
456
+ ]
457
+ }
458
+ ],
459
+ "Chartqa-au": [
460
+ 8,
461
+ 10,
462
+ 0.8,
463
+ {
464
+ "Default": [
465
+ 8,
466
+ 10,
467
+ 0.8
468
+ ]
469
+ }
470
+ ],
471
+ "infographic": [
472
+ 8,
473
+ 10,
474
+ 0.8,
475
+ {
476
+ "Default": [
477
+ 8,
478
+ 10,
479
+ 0.8
480
+ ]
481
+ }
482
+ ],
483
+ "Key Information Extraction": [
484
+ 40,
485
+ 45,
486
+ 0.889,
487
+ {
488
+ "Default": [
489
+ 40,
490
+ 45,
491
+ 0.889
492
+ ]
493
+ }
494
+ ],
495
+ "Scene Text-centric VQA": [
496
+ 30,
497
+ 40,
498
+ 0.75,
499
+ {
500
+ "Default": [
501
+ 30,
502
+ 40,
503
+ 0.75
504
+ ]
505
+ }
506
+ ],
507
+ "Artistic Text Recognition": [
508
+ 7,
509
+ 11,
510
+ 0.636,
511
+ {
512
+ "Default": [
513
+ 7,
514
+ 11,
515
+ 0.636
516
+ ]
517
+ }
518
+ ],
519
+ "IrRegular Text Recognition": [
520
+ 10,
521
+ 11,
522
+ 0.909,
523
+ {
524
+ "Default": [
525
+ 10,
526
+ 11,
527
+ 0.909
528
+ ]
529
+ }
530
+ ],
531
+ "Non-semantic Text Recognition": [
532
+ 11,
533
+ 12,
534
+ 0.917,
535
+ {
536
+ "Default": [
537
+ 11,
538
+ 12,
539
+ 0.917
540
+ ]
541
+ }
542
+ ],
543
+ "Regular Text Recognition": [
544
+ 11,
545
+ 11,
546
+ 1.0,
547
+ {
548
+ "Default": [
549
+ 11,
550
+ 11,
551
+ 1.0
552
+ ]
553
+ }
554
+ ],
555
+ "Handwriting_CN": [
556
+ 15,
557
+ 20,
558
+ 0.75,
559
+ {
560
+ "Default": [
561
+ 15,
562
+ 20,
563
+ 0.75
564
+ ]
565
+ }
566
+ ],
567
+ "Chinese Unlimited": [
568
+ 23,
569
+ 44,
570
+ 0.523,
571
+ {
572
+ "Default": [
573
+ 23,
574
+ 44,
575
+ 0.523
576
+ ]
577
+ }
578
+ ],
579
+ "acc_stderr": 0,
580
+ "acc": 74.786
581
+ },
582
+ "CharXiv": {
583
+ "descriptive": {
584
+ "Overall Score": 85.28,
585
+ "By Question": {
586
+ "Q1": 84.43,
587
+ "Q2": 80.87,
588
+ "Q3": 72.96,
589
+ "Q4": 87.94,
590
+ "Q5": 89.12,
591
+ "Q6": 84.34,
592
+ "Q7": 88.46,
593
+ "Q8": 90.18,
594
+ "Q9": 85.07,
595
+ "Q10": 82.88,
596
+ "Q11": 77.14,
597
+ "Q12": 89.01,
598
+ "Q13": 71.23,
599
+ "Q14": 95.74,
600
+ "Q15": 95.85,
601
+ "Q16": 77.78,
602
+ "Q17": 70.54,
603
+ "Q18": 93.12,
604
+ "Q19": 92.31
605
+ },
606
+ "By Category": {
607
+ "Information Extraction": 84.1,
608
+ "Enumeration": 88.7,
609
+ "Pattern Recognition": 85.81,
610
+ "Counting": 87.28,
611
+ "Compositionality": 70.54
612
+ },
613
+ "By Subplot": {
614
+ "1 Subplot": 89.18,
615
+ "2-4 Subplots": 86.44,
616
+ "5+ Subplots": 77.01
617
+ },
618
+ "By Subject": {
619
+ "Computer Science": 84.13,
620
+ "Economics": 86.41,
621
+ "Electrical Engineering and Systems Science": 89.08,
622
+ "Mathematics": 86.48,
623
+ "Physics": 82.48,
624
+ "Quantitative Biology": 80.56,
625
+ "Quantitative Finance": 85.56,
626
+ "Statistics": 87.83
627
+ },
628
+ "By Year": {
629
+ "2020": 85.12,
630
+ "2021": 83.72,
631
+ "2022": 86.07,
632
+ "2023": 86.29
633
+ },
634
+ "N_valid": 4000,
635
+ "N_invalid": 0,
636
+ "Question Type": "Descriptive"
637
+ },
638
+ "reasoning": {
639
+ "Overall Score": 45.6,
640
+ "By Answer Type": {
641
+ "Text-in-Chart": 48.41,
642
+ "Text-in-General": 53.54,
643
+ "Number-in-Chart": 45.69,
644
+ "Number-in-General": 36.68
645
+ },
646
+ "By Source": {
647
+ "GPT-Sourced": 50.54,
648
+ "GPT-Inspired": 43.98,
649
+ "Completely Human": 44.67
650
+ },
651
+ "By Subject": {
652
+ "Computer Science": 44.44,
653
+ "Economics": 50.0,
654
+ "Electrical Engineering and Systems Science": 40.34,
655
+ "Mathematics": 51.85,
656
+ "Physics": 54.33,
657
+ "Quantitative Biology": 39.68,
658
+ "Quantitative Finance": 37.93,
659
+ "Statistics": 44.25
660
+ },
661
+ "By Year": {
662
+ "2020": 44.94,
663
+ "2021": 44.83,
664
+ "2022": 39.75,
665
+ "2023": 52.82
666
+ },
667
+ "By Subplot": {
668
+ "1 Subplot": 47.93,
669
+ "2-4 Subplots": 44.18,
670
+ "5+ Subplots": 44.07
671
+ },
672
+ "N_valid": 1000,
673
+ "N_invalid": 0,
674
+ "Question Type": "Reasoning"
675
+ },
676
+ "accuracy": 65.44,
677
+ "acc_stderr": 0,
678
+ "acc": 65.44
679
+ },
680
+ "MathVision": {
681
+ "accuracy": 30.26,
682
+ "acc_stderr": 0,
683
+ "acc": 30.26
684
+ },
685
+ "CII-Bench": {
686
+ "accuracy": 63.27,
687
+ "domain_score": {
688
+ "CTC": 62.22,
689
+ "Society": 69.19,
690
+ "Env.": 66.67,
691
+ "Art": 63.24,
692
+ "Life": 58.87,
693
+ "Politics": 58.33
694
+ },
695
+ "emotion_score": {
696
+ "Positive": 61.54,
697
+ "Negative": 66.04,
698
+ "Neutral": 62.03
699
+ },
700
+ "acc_stderr": 0,
701
+ "acc": 63.27
702
+ },
703
+ "Blink": {
704
+ "accuracy": 59.18,
705
+ "Art Style": 71.79,
706
+ "Counting": 70.83,
707
+ "Forensic Detection": 54.55,
708
+ "Functional Correspondence": 42.31,
709
+ "IQ Test": 29.33,
710
+ "Jigsaw": 72.0,
711
+ "Multi-view Reasoning": 43.61,
712
+ "Object Localization": 42.62,
713
+ "Relative Depth": 75.81,
714
+ "Relative Reflectance": 33.58,
715
+ "Semantic Correspondence": 51.8,
716
+ "Spatial Relation": 82.52,
717
+ "Visual Correspondence": 75.58,
718
+ "Visual Similarity": 80.0,
719
+ "acc_stderr": 0,
720
+ "acc": 59.18
721
+ }
722
+ }
723
+ }
Qwen2.5-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,722 @@
1
+ {
2
+ "config_general": {
3
+ "model_name": "Qwen2.5-VL-7B-Instruct",
4
+ "model_dtype": "float16",
5
+ "model_size": 0
6
+ },
7
+ "results": {
8
+ "CMMMU": {
9
+ "艺术与设计": {
10
+ "num": 88,
11
+ "correct": 56,
12
+ "accuracy": 63.64
13
+ },
14
+ "overall": {
15
+ "num": 900,
16
+ "correct": 406,
17
+ "accuracy": 45.11
18
+ },
19
+ "商业": {
20
+ "num": 126,
21
+ "correct": 35,
22
+ "accuracy": 27.78
23
+ },
24
+ "科学": {
25
+ "num": 204,
26
+ "correct": 83,
27
+ "accuracy": 40.69
28
+ },
29
+ "健康与医学": {
30
+ "num": 153,
31
+ "correct": 72,
32
+ "accuracy": 47.06
33
+ },
34
+ "人文社会科学": {
35
+ "num": 85,
36
+ "correct": 49,
37
+ "accuracy": 57.65
38
+ },
39
+ "技术与工程": {
40
+ "num": 244,
41
+ "correct": 111,
42
+ "accuracy": 45.49
43
+ },
44
+ "accuracy": 45.11,
45
+ "acc_stderr": 0,
46
+ "acc": 45.11
47
+ },
48
+ "MMMU": {
49
+ "accuracy": 50.67,
50
+ "subject_score": {
51
+ "Accounting": 33.33,
52
+ "Agriculture": 40.0,
53
+ "Architecture": 50.0,
54
+ "Art": 78.33,
55
+ "Basic": 53.33,
56
+ "Biology": 50.0,
57
+ "Chemistry": 36.67,
58
+ "Clinical": 56.67,
59
+ "Computer": 50.0,
60
+ "Design": 73.33,
61
+ "Diagnostics": 40.0,
62
+ "Economics": 43.33,
63
+ "Electronics": 33.33,
64
+ "Energy": 30.0,
65
+ "Finance": 33.33,
66
+ "Geography": 46.67,
67
+ "History": 66.67,
68
+ "Literature": 80.0,
69
+ "Manage": 33.33,
70
+ "Marketing": 56.67,
71
+ "Materials": 40.0,
72
+ "Math": 43.33,
73
+ "Mechanical": 46.67,
74
+ "Music": 30.0,
75
+ "Pharmacy": 70.0,
76
+ "Physics": 33.33,
77
+ "Psychology": 73.33,
78
+ "Public": 60.0,
79
+ "Sociology": 60.0
80
+ },
81
+ "difficulty_score": {
82
+ "Medium": 46.46,
83
+ "Easy": 64.07,
84
+ "Hard": 38.67
85
+ },
86
+ "acc_stderr": 0,
87
+ "acc": 50.67
88
+ },
89
+ "MMMU_Pro_standard": {
90
+ "accuracy": 36.76,
91
+ "subject_score": {
92
+ "History": 48.21,
93
+ "Finance": 21.67,
94
+ "Design": 63.33,
95
+ "Literature": 65.38,
96
+ "Agriculture": 25.0,
97
+ "Clinical_Medicine": 28.81,
98
+ "Accounting": 25.86,
99
+ "Sociology": 38.89,
100
+ "Art": 54.72,
101
+ "Physics": 25.0,
102
+ "Public_Health": 20.69,
103
+ "Energy_and_Power": 20.69,
104
+ "Pharmacy": 49.12,
105
+ "Electronics": 53.33,
106
+ "Architecture_and_Engineering": 31.67,
107
+ "Art_Theory": 67.27,
108
+ "Psychology": 38.33,
109
+ "Economics": 33.9,
110
+ "Biology": 33.9,
111
+ "Diagnostics_and_Laboratory_Medicine": 28.33,
112
+ "Manage": 26.0,
113
+ "Mechanical_Engineering": 35.59,
114
+ "Basic_Medical_Science": 36.54,
115
+ "Computer_Science": 45.0,
116
+ "Math": 25.0,
117
+ "Music": 26.67,
118
+ "Materials": 23.33,
119
+ "Marketing": 30.51,
120
+ "Chemistry": 36.67,
121
+ "Geography": 51.92
122
+ },
123
+ "difficulty_score": {
124
+ "Medium": 34.46,
125
+ "Easy": 46.59,
126
+ "Hard": 28.43
127
+ },
128
+ "acc_stderr": 0,
129
+ "acc": 36.76
130
+ },
131
+ "MMMU_Pro_vision": {
132
+ "accuracy": 34.91,
133
+ "subject_score": {
134
+ "Design": 46.67,
135
+ "History": 53.57,
136
+ "Sociology": 33.33,
137
+ "Art": 50.94,
138
+ "Literature": 69.23,
139
+ "Agriculture": 23.33,
140
+ "Pharmacy": 40.35,
141
+ "Clinical_Medicine": 30.51,
142
+ "Architecture_and_Engineering": 20.0,
143
+ "Accounting": 43.1,
144
+ "Physics": 33.33,
145
+ "Art_Theory": 50.91,
146
+ "Energy_and_Power": 18.97,
147
+ "Psychology": 26.67,
148
+ "Biology": 30.51,
149
+ "Manage": 22.0,
150
+ "Economics": 37.29,
151
+ "Public_Health": 46.55,
152
+ "Mechanical_Engineering": 23.73,
153
+ "Diagnostics_and_Laboratory_Medicine": 26.67,
154
+ "Electronics": 41.67,
155
+ "Basic_Medical_Science": 36.54,
156
+ "Finance": 45.0,
157
+ "Computer_Science": 38.33,
158
+ "Math": 23.33,
159
+ "Music": 28.33,
160
+ "Marketing": 32.2,
161
+ "Materials": 15.0,
162
+ "Chemistry": 36.67,
163
+ "Geography": 28.85
164
+ },
165
+ "acc_stderr": 0,
166
+ "acc": 34.91
167
+ },
168
+ "MmvetV2": {
169
+ "reject_info": {
170
+ "reject_rate": 1.35,
171
+ "reject_number": 7,
172
+ "total_question": 517
173
+ },
174
+ "accuracy": 61.7843,
175
+ "capability_scores": {
176
+ "ocr": 68.5365853658537,
177
+ "math": 69.11764705882352,
178
+ "spat": 57.70408163265303,
179
+ "rec": 57.30864197530875,
180
+ "know": 57.56410256410256,
181
+ "gen": 59.22509225092255,
182
+ "seq": 44.08163265306123
183
+ },
184
+ "capability_detail_scores": {
185
+ "ocr_math": 86.36363636363636,
186
+ "spat_ocr_math": 70.0,
187
+ "spat_rec_ocr_math": 45.0,
188
+ "spat_rec": 56.07142857142857,
189
+ "spat_ocr": 77.3076923076923,
190
+ "spat_rec_ocr": 44.166666666666664,
191
+ "spat_know_ocr": 100.0,
192
+ "rec_ocr": 75.0,
193
+ "spat_know_rec": 40.0,
194
+ "ocr": 83.125,
195
+ "rec": 67.28813559322033,
196
+ "know_rec": 57.692307692307686,
197
+ "know_rec_gen": 57.999999999999964,
198
+ "know_rec_ocr_gen": 67.6923076923077,
199
+ "spat_rec_ocr_gen": 61.86046511627909,
200
+ "spat_ocr_gen": 80.0,
201
+ "seq_gen_math_spat_ocr": 20.0,
202
+ "seq_math_spat_ocr_rec": 0.0,
203
+ "spat_rec_gen": 51.81818181818182,
204
+ "spat_gen_ocr_math": 40.0,
205
+ "spat_rec_seq": 38.33333333333333,
206
+ "spat_rec_ocr_seq": 0.0,
207
+ "spat_know_rec_gen": 40.00000000000001,
208
+ "rec_gen": 60.882352941176464,
209
+ "spat_know_rec_ocr": 12.5,
210
+ "know_gen_spat_ocr_rec": 90.0,
211
+ "rec_ocr_math": 100.0,
212
+ "rec_ocr_gen": 72.0,
213
+ "rec_ocr_seq_gen": 45.0,
214
+ "ocr_gen": 77.6923076923077,
215
+ "rec_seq_gen": 36.15384615384615,
216
+ "rec_seq": 50.0,
217
+ "spat_rec_seq_gen": 63.74999999999999,
218
+ "know_rec_seq": 0.0,
219
+ "know_rec_seq_gen": 65.0,
220
+ "seq_gen_spat_ocr_rec": 50.0,
221
+ "know_seq_gen_ocr_rec": 85.00000000000001,
222
+ "know_rec_math": 50.0,
223
+ "rec_ocr_seq": 100.0
224
+ },
225
+ "acc_stderr": 0,
226
+ "acc": 61.7843
227
+ },
228
+ "MathVerse": {
229
+ "Vision Intensive": {
230
+ "accuracy": 42.64,
231
+ "correct": 336,
232
+ "total": 788
233
+ },
234
+ "Total": {
235
+ "accuracy": 45.38,
236
+ "correct": 1788,
237
+ "total": 3940
238
+ },
239
+ "Text Dominant": {
240
+ "accuracy": 53.81,
241
+ "correct": 424,
242
+ "total": 788
243
+ },
244
+ "Text Lite": {
245
+ "accuracy": 47.34,
246
+ "correct": 373,
247
+ "total": 788
248
+ },
249
+ "Vision Dominant": {
250
+ "accuracy": 45.43,
251
+ "correct": 358,
252
+ "total": 788
253
+ },
254
+ "Vision Only": {
255
+ "accuracy": 37.69,
256
+ "correct": 297,
257
+ "total": 788
258
+ },
259
+ "accuracy": 45.38,
260
+ "acc_stderr": 0,
261
+ "acc": 45.38
262
+ },
263
+ "Ocrlite": {
264
+ "final_score": [
265
+ 1247,
266
+ 1644
267
+ ],
268
+ "accuracy": 75.852,
269
+ "Key Information Extraction-Bookshelf": [
270
+ 26,
271
+ 51,
272
+ 0.51,
273
+ {
274
+ "Default": [
275
+ 26,
276
+ 51,
277
+ 0.51
278
+ ]
279
+ }
280
+ ],
281
+ "Scene Text-centric VQA-diet_constraints": [
282
+ 52,
283
+ 90,
284
+ 0.578,
285
+ {
286
+ "Default": [
287
+ 52,
288
+ 90,
289
+ 0.578
290
+ ]
291
+ }
292
+ ],
293
+ "Doc-oriented VQA-Control": [
294
+ 142,
295
+ 189,
296
+ 0.751,
297
+ {
298
+ "Default": [
299
+ 142,
300
+ 189,
301
+ 0.751
302
+ ]
303
+ }
304
+ ],
305
+ "Doc-oriented VQA": [
306
+ 171,
307
+ 204,
308
+ 0.838,
309
+ {
310
+ "Default": [
311
+ 171,
312
+ 204,
313
+ 0.838
314
+ ]
315
+ }
316
+ ],
317
+ "Scene Text-centric VQA-Fake_logo": [
318
+ 54,
319
+ 119,
320
+ 0.454,
321
+ {
322
+ "Default": [
323
+ 54,
324
+ 119,
325
+ 0.454
326
+ ]
327
+ }
328
+ ],
329
+ "Handwritten Mathematical Expression Recognition": [
330
+ 1,
331
+ 100,
332
+ 0.01,
333
+ {
334
+ "Default": [
335
+ 1,
336
+ 100,
337
+ 0.01
338
+ ]
339
+ }
340
+ ],
341
+ "Key Information Extraction": [
342
+ 198,
343
+ 209,
344
+ 0.947,
345
+ {
346
+ "Default": [
347
+ 198,
348
+ 209,
349
+ 0.947
350
+ ]
351
+ }
352
+ ],
353
+ "Scene Text-centric VQA-Control": [
354
+ 173,
355
+ 200,
356
+ 0.865,
357
+ {
358
+ "Default": [
359
+ 173,
360
+ 200,
361
+ 0.865
362
+ ]
363
+ }
364
+ ],
365
+ "Scene Text-centric VQA": [
366
+ 247,
367
+ 282,
368
+ 0.876,
369
+ {
370
+ "Default": [
371
+ 247,
372
+ 282,
373
+ 0.876
374
+ ]
375
+ }
376
+ ],
377
+ "Artistic Text Recognition": [
378
+ 42,
379
+ 50,
380
+ 0.84,
381
+ {
382
+ "Default": [
383
+ 42,
384
+ 50,
385
+ 0.84
386
+ ]
387
+ }
388
+ ],
389
+ "Irregular Text Recognition": [
390
+ 47,
391
+ 50,
392
+ 0.94,
393
+ {
394
+ "Default": [
395
+ 47,
396
+ 50,
397
+ 0.94
398
+ ]
399
+ }
400
+ ],
401
+ "Non-Semantic Text Recognition": [
402
+ 45,
403
+ 50,
404
+ 0.9,
405
+ {
406
+ "Default": [
407
+ 45,
408
+ 50,
409
+ 0.9
410
+ ]
411
+ }
412
+ ],
413
+ "Regular Text Recognition": [
414
+ 49,
415
+ 50,
416
+ 0.98,
417
+ {
418
+ "Default": [
419
+ 49,
420
+ 50,
421
+ 0.98
422
+ ]
423
+ }
424
+ ],
425
+ "acc_stderr": 0,
426
+ "acc": 75.852
427
+ },
428
+ "OcrliteZh": {
429
+ "final_score": [
430
+ 161,
431
+ 234
432
+ ],
433
+ "accuracy": 68.803,
434
+ "Docvqa": [
435
+ 6,
436
+ 10,
437
+ 0.6,
438
+ {
439
+ "Default": [
440
+ 6,
441
+ 10,
442
+ 0.6
443
+ ]
444
+ }
445
+ ],
446
+ "Chartqa-human": [
447
+ 4,
448
+ 10,
449
+ 0.4,
450
+ {
451
+ "Default": [
452
+ 4,
453
+ 10,
454
+ 0.4
455
+ ]
456
+ }
457
+ ],
458
+ "Chartqa-au": [
459
+ 7,
460
+ 10,
461
+ 0.7,
462
+ {
463
+ "Default": [
464
+ 7,
465
+ 10,
466
+ 0.7
467
+ ]
468
+ }
469
+ ],
470
+ "infographic": [
471
+ 7,
472
+ 10,
473
+ 0.7,
474
+ {
475
+ "Default": [
476
+ 7,
477
+ 10,
478
+ 0.7
479
+ ]
480
+ }
481
+ ],
482
+ "Key Information Extraction": [
483
+ 39,
484
+ 45,
485
+ 0.867,
486
+ {
487
+ "Default": [
488
+ 39,
489
+ 45,
490
+ 0.867
491
+ ]
492
+ }
493
+ ],
494
+ "Scene Text-centric VQA": [
495
+ 28,
496
+ 40,
497
+ 0.7,
498
+ {
499
+ "Default": [
500
+ 28,
501
+ 40,
502
+ 0.7
503
+ ]
504
+ }
505
+ ],
506
+ "Artistic Text Recognition": [
507
+ 7,
508
+ 11,
509
+ 0.636,
510
+ {
511
+ "Default": [
512
+ 7,
513
+ 11,
514
+ 0.636
515
+ ]
516
+ }
517
+ ],
518
+ "IrRegular Text Recognition": [
519
+ 8,
520
+ 11,
521
+ 0.727,
522
+ {
523
+ "Default": [
524
+ 8,
525
+ 11,
526
+ 0.727
527
+ ]
528
+ }
529
+ ],
530
+ "Non-semantic Text Recognition": [
531
+ 10,
532
+ 12,
533
+ 0.833,
534
+ {
535
+ "Default": [
536
+ 10,
537
+ 12,
538
+ 0.833
539
+ ]
540
+ }
541
+ ],
542
+ "Regular Text Recognition": [
543
+ 10,
544
+ 11,
545
+ 0.909,
546
+ {
547
+ "Default": [
548
+ 10,
549
+ 11,
550
+ 0.909
551
+ ]
552
+ }
553
+ ],
554
+ "Handwriting_CN": [
555
+ 16,
556
+ 20,
557
+ 0.8,
558
+ {
559
+ "Default": [
560
+ 16,
561
+ 20,
562
+ 0.8
563
+ ]
564
+ }
565
+ ],
566
+ "Chinese Unlimited": [
567
+ 19,
568
+ 44,
569
+ 0.432,
570
+ {
571
+ "Default": [
572
+ 19,
573
+ 44,
574
+ 0.432
575
+ ]
576
+ }
577
+ ],
578
+ "acc_stderr": 0,
579
+ "acc": 68.803
580
+ },
581
+ "CharXiv": {
582
+ "descriptive": {
583
+ "Overall Score": 38.55,
584
+ "By Question": {
585
+ "Q1": 83.2,
586
+ "Q2": 77.83,
587
+ "Q3": 69.1,
588
+ "Q4": 85.99,
589
+ "Q5": 81.17,
590
+ "Q6": 75.1,
591
+ "Q7": 76.5,
592
+ "Q8": 62.95,
593
+ "Q9": 38.31,
594
+ "Q10": 0.0,
595
+ "Q11": 0.0,
596
+ "Q12": 0.0,
597
+ "Q13": 0.0,
598
+ "Q14": 0.0,
599
+ "Q15": 0.0,
600
+ "Q16": 0.0,
601
+ "Q17": 0.0,
602
+ "Q18": 0.0,
603
+ "Q19": 0.0
604
+ },
605
+ "By Category": {
606
+ "Information Extraction": 78.53,
607
+ "Enumeration": 17.59,
608
+ "Pattern Recognition": 0.0,
609
+ "Counting": 0.0,
610
+ "Compositionality": 0.0
611
+ },
612
+ "By Subplot": {
613
+ "1 Subplot": 41.77,
614
+ "2-4 Subplots": 37.1,
615
+ "5+ Subplots": 35.59
616
+ },
617
+ "By Subject": {
618
+ "Computer Science": 38.49,
619
+ "Economics": 38.59,
620
+ "Electrical Engineering and Systems Science": 44.12,
621
+ "Mathematics": 37.04,
622
+ "Physics": 38.39,
623
+ "Quantitative Biology": 31.94,
624
+ "Quantitative Finance": 38.36,
625
+ "Statistics": 42.26
626
+ },
627
+ "By Year": {
628
+ "2020": 38.46,
629
+ "2021": 39.08,
630
+ "2022": 39.34,
631
+ "2023": 37.3
632
+ },
633
+ "N_valid": 4000,
634
+ "N_invalid": 1975,
635
+ "Question Type": "Descriptive"
636
+ },
637
+ "reasoning": {
638
+ "Overall Score": 37.7,
639
+ "By Answer Type": {
640
+ "Text-in-Chart": 38.18,
641
+ "Text-in-General": 41.41,
642
+ "Number-in-Chart": 40.52,
643
+ "Number-in-General": 32.31
644
+ },
645
+ "By Source": {
646
+ "GPT-Sourced": 40.22,
647
+ "GPT-Inspired": 37.96,
648
+ "Completely Human": 36.83
649
+ },
650
+ "By Subject": {
651
+ "Computer Science": 26.19,
652
+ "Economics": 39.13,
653
+ "Electrical Engineering and Systems Science": 39.5,
654
+ "Mathematics": 41.48,
655
+ "Physics": 42.52,
656
+ "Quantitative Biology": 39.68,
657
+ "Quantitative Finance": 36.21,
658
+ "Statistics": 36.28
659
+ },
660
+ "By Year": {
661
+ "2020": 30.77,
662
+ "2021": 41.76,
663
+ "2022": 36.89,
664
+ "2023": 41.13
665
+ },
666
+ "By Subplot": {
667
+ "1 Subplot": 39.9,
668
+ "2-4 Subplots": 37.04,
669
+ "5+ Subplots": 35.17
670
+ },
671
+ "N_valid": 1000,
672
+ "N_invalid": 34,
673
+ "Question Type": "Reasoning"
674
+ },
675
+ "accuracy": 38.12,
676
+ "acc_stderr": 0,
677
+ "acc": 38.12
678
+ },
679
+ "MathVision": {
680
+ "accuracy": 18.65,
681
+ "acc_stderr": 0,
682
+ "acc": 18.65
683
+ },
684
+ "CII-Bench": {
685
+ "accuracy": 48.89,
686
+ "domain_score": {
687
+ "CTC": 52.59,
688
+ "Society": 50.81,
689
+ "Life": 41.56,
690
+ "Art": 47.79,
691
+ "Env.": 61.11,
692
+ "Politics": 62.5
693
+ },
694
+ "emotion_score": {
695
+ "Positive": 49.57,
696
+ "Negative": 48.3,
697
+ "Neutral": 48.87
698
+ },
699
+ "acc_stderr": 0,
700
+ "acc": 48.89
701
+ },
702
+ "Blink": {
703
+ "accuracy": 56.08,
704
+ "Art Style": 59.83,
705
+ "Counting": 63.33,
706
+ "Forensic Detection": 58.33,
707
+ "Functional Correspondence": 31.54,
708
+ "IQ Test": 24.0,
709
+ "Jigsaw": 59.33,
710
+ "Multi-view Reasoning": 54.89,
711
+ "Object Localization": 55.74,
712
+ "Relative Depth": 76.61,
713
+ "Relative Reflectance": 27.61,
714
+ "Semantic Correspondence": 39.57,
715
+ "Spatial Relation": 80.42,
716
+ "Visual Correspondence": 68.02,
717
+ "Visual Similarity": 86.67,
718
+ "acc_stderr": 0,
719
+ "acc": 56.08
720
+ }
721
+ }
722
+ }
Step-1V-32k/results_2025-01-25T10-42-53.190540.json CHANGED
@@ -58,9 +58,41 @@
58
  "acc": 35.66
59
  },
60
  "MMMU_Pro_vision": {
61
- "accuracy": 59.08,
62
  "acc_stderr": 0,
63
- "acc": 59.08
64
  },
65
  "MmvetV2": {
66
  "reject_info": {
@@ -386,9 +418,9 @@
386
  "acc": 51.35
387
  },
388
  "MathVision": {
389
- "accuracy": 25.86,
390
  "acc_stderr": 0,
391
- "acc": 25.86
392
  },
393
  "CII-Bench": {
394
  "accuracy": 58.82,
 
58
  "acc": 35.66
59
  },
60
  "MMMU_Pro_vision": {
61
+ "accuracy": 28.96,
62
+ "subject_score": {
63
+ "History": 33.93,
64
+ "Art": 50.94,
65
+ "Design": 48.33,
66
+ "Literature": 57.69,
67
+ "Agriculture": 23.33,
68
+ "Finance": 26.67,
69
+ "Sociology": 42.59,
70
+ "Accounting": 25.86,
71
+ "Energy_and_Power": 15.52,
72
+ "Pharmacy": 28.07,
73
+ "Architecture_and_Engineering": 16.67,
74
+ "Clinical_Medicine": 11.86,
75
+ "Public_Health": 34.48,
76
+ "Physics": 30.0,
77
+ "Art_Theory": 45.45,
78
+ "Electronics": 20.0,
79
+ "Psychology": 31.67,
80
+ "Biology": 20.34,
81
+ "Manage": 32.0,
82
+ "Economics": 35.59,
83
+ "Mechanical_Engineering": 13.56,
84
+ "Diagnostics_and_Laboratory_Medicine": 20.0,
85
+ "Basic_Medical_Science": 36.54,
86
+ "Computer_Science": 30.0,
87
+ "Math": 25.0,
88
+ "Music": 20.0,
89
+ "Materials": 13.33,
90
+ "Marketing": 25.42,
91
+ "Chemistry": 36.67,
92
+ "Geography": 26.92
93
+ },
94
  "acc_stderr": 0,
95
+ "acc": 28.96
96
  },
97
  "MmvetV2": {
98
  "reject_info": {
 
418
  "acc": 51.35
419
  },
420
  "MathVision": {
421
+ "accuracy": 25.03,
422
  "acc_stderr": 0,
423
+ "acc": 25.03
424
  },
425
  "CII-Bench": {
426
  "accuracy": 58.82,