xuanricheng committed on
Commit df5a402 · verified · 1 Parent(s): e663878

Add results for glm-4v

glm-4v/results_2025-01-17T15-45-24.560866.json ADDED
@@ -0,0 +1,371 @@
+ {
+   "config_general": {
+     "model_name": "glm-4v",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "ChartQA": {
+       "acc": 35.08,
+       "acc_stderr": 0,
+       "accuracy": 35.08,
+       "human_test": {
+         "total": 1250,
+         "correct": 469,
+         "accuracy": 37.52
+       },
+       "augmented_test": {
+         "total": 1250,
+         "correct": 408,
+         "accuracy": 32.64
+       }
+     },
+     "CMMMU": {
+       "acc": 34.78,
+       "acc_stderr": 0,
+       "\u5546\u4e1a": {
+         "num": 126,
+         "correct": 27,
+         "accuracy": 21.43
+       },
+       "\u79d1\u5b66": {
+         "num": 204,
+         "correct": 54,
+         "accuracy": 26.47
+       },
+       "overall": {
+         "num": 900,
+         "correct": 313,
+         "accuracy": 34.78
+       },
+       "accuracy": 34.78,
+       "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+         "num": 153,
+         "correct": 56,
+         "accuracy": 36.6
+       },
+       "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+         "num": 244,
+         "correct": 87,
+         "accuracy": 35.66
+       },
+       "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+         "num": 88,
+         "correct": 51,
+         "accuracy": 57.95
+       },
+       "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+         "num": 85,
+         "correct": 38,
+         "accuracy": 44.71
+       }
+     },
+     "CMMU": {
+       "acc": 10.58,
+       "acc_stderr": 0,
+       "val": {
+         "multiple-choice": {
+           "hard": {
+             "total": 150,
+             "correct": 6,
+             "accuracy": 4.0
+           },
+           "normal": {
+             "total": 1205,
+             "correct": 160,
+             "accuracy": 13.28
+           }
+         },
+         "fill-in-the-blank": {
+           "hard": {
+             "total": 300,
+             "correct": 20,
+             "accuracy": 6.67
+           },
+           "normal": {
+             "total": 507,
+             "correct": 48,
+             "accuracy": 9.47
+           }
+         },
+         "multiple-response": {
+           "hard": {
+             "total": 94,
+             "correct": 0,
+             "accuracy": 0.0
+           },
+           "normal": {
+             "total": 33,
+             "correct": 0,
+             "accuracy": 0.0
+           }
+         }
+       },
+       "test": {
+         "multiple-choice": {
+           "hard": {
+             "total": 150,
+             "correct": 4,
+             "accuracy": 2.67
+           },
+           "normal": {
+             "total": 1205,
+             "correct": 157,
+             "accuracy": 13.03
+           }
+         },
+         "fill-in-the-blank": {
+           "hard": {
+             "total": 296,
+             "correct": 25,
+             "accuracy": 8.45
+           },
+           "normal": {
+             "total": 529,
+             "correct": 57,
+             "accuracy": 10.78
+           }
+         },
+         "multiple-response": {
+           "hard": {
+             "total": 95,
+             "correct": 1,
+             "accuracy": 1.05
+           },
+           "normal": {
+             "total": 32,
+             "correct": 0,
+             "accuracy": 0.0
+           }
+         }
+       },
+       "val-overall": {
+         "total": 2289,
+         "correct": 234,
+         "accuracy": 10.22,
+         "bias_rate": 128.08
+       },
+       "test-overall": {
+         "total": 2307,
+         "correct": 244,
+         "accuracy": 10.58,
+         "bias_rate": 130.25
+       }
+     },
+     "MMMU": {
+       "acc": 40.56,
+       "acc_stderr": 0,
+       "accuracy": 40.56,
+       "subject_score": {
+         "Art": 61.67,
+         "Math": 36.67,
+         "Basic": 50.0,
+         "Music": 40.0,
+         "Design": 66.67,
+         "Energy": 46.67,
+         "Manage": 30.0,
+         "Public": 43.33,
+         "Biology": 40.0,
+         "Finance": 20.0,
+         "History": 63.33,
+         "Physics": 6.67,
+         "Clinical": 46.67,
+         "Computer": 36.67,
+         "Pharmacy": 36.67,
+         "Chemistry": 23.33,
+         "Economics": 30.0,
+         "Geography": 30.0,
+         "Marketing": 40.0,
+         "Materials": 26.67,
+         "Sociology": 50.0,
+         "Accounting": 43.33,
+         "Literature": 83.33,
+         "Mechanical": 30.0,
+         "Psychology": 46.67,
+         "Agriculture": 33.33,
+         "Diagnostics": 40.0,
+         "Electronics": 20.0,
+         "Architecture": 33.33
+       },
+       "difficulty_score": {
+         "Easy": 49.15,
+         "Hard": 26.52,
+         "Medium": 40.57
+       }
+     },
+     "MMMU_Pro_standard": {
+       "acc": 20.58,
+       "acc_stderr": 0,
+       "accuracy": 20.58,
+       "subject_score": {
+         "Art": 24.53,
+         "Math": 21.67,
+         "Music": 23.33,
+         "Design": 31.67,
+         "Manage": 24.0,
+         "Biology": 27.12,
+         "Finance": 16.67,
+         "History": 21.43,
+         "Physics": 3.33,
+         "Pharmacy": 26.32,
+         "Chemistry": 13.33,
+         "Economics": 20.34,
+         "Geography": 25.0,
+         "Marketing": 16.95,
+         "Materials": 15.0,
+         "Sociology": 25.93,
+         "Accounting": 13.79,
+         "Art_Theory": 38.18,
+         "Literature": 48.08,
+         "Psychology": 18.33,
+         "Agriculture": 13.33,
+         "Electronics": 15.0,
+         "Public_Health": 12.07,
+         "Computer_Science": 23.33,
+         "Energy_and_Power": 15.52,
+         "Clinical_Medicine": 28.81,
+         "Basic_Medical_Science": 21.15,
+         "Mechanical_Engineering": 22.03,
+         "Architecture_and_Engineering": 6.67,
+         "Diagnostics_and_Laboratory_Medicine": 11.67
+       },
+       "difficulty_score": {
+         "Easy": 27.84,
+         "Hard": 12.47,
+         "Medium": 19.85
+       }
+     },
+     "MMMU_Pro_vision": {
+       "acc": 16.71,
+       "acc_stderr": 0,
+       "accuracy": 16.71,
+       "subject_score": {
+         "Art": 28.3,
+         "Math": 20.0,
+         "Music": 25.0,
+         "Design": 20.0,
+         "Manage": 22.0,
+         "Biology": 15.25,
+         "Finance": 8.33,
+         "History": 14.29,
+         "Physics": 15.0,
+         "Pharmacy": 17.54,
+         "Chemistry": 11.67,
+         "Economics": 18.64,
+         "Geography": 23.08,
+         "Marketing": 10.17,
+         "Materials": 11.67,
+         "Sociology": 25.93,
+         "Accounting": 13.79,
+         "Art_Theory": 29.09,
+         "Literature": 44.23,
+         "Psychology": 11.67,
+         "Agriculture": 8.33,
+         "Electronics": 8.33,
+         "Public_Health": 13.79,
+         "Computer_Science": 18.33,
+         "Energy_and_Power": 15.52,
+         "Clinical_Medicine": 8.47,
+         "Basic_Medical_Science": 15.38,
+         "Mechanical_Engineering": 15.25,
+         "Architecture_and_Engineering": 11.67,
+         "Diagnostics_and_Laboratory_Medicine": 8.33
+       }
+     },
+     "OCRBench": {
+       "acc": 79.1,
+       "acc_stderr": 0,
+       "accuracy": 79.1,
+       "final_score": [
+         791,
+         1000
+       ],
+       "Doc-oriented VQA": [
+         150,
+         200
+       ],
+       "Scene Text-centric VQA": [
+         178,
+         200
+       ],
+       "Handwriting Recognition": [
+         34,
+         50
+       ],
+       "Digit String Recognition": [
+         32,
+         50
+       ],
+       "Regular Text Recognition": [
+         49,
+         50
+       ],
+       "Artistic Text Recognition": [
+         47,
+         50
+       ],
+       "Irregular Text Recognition": [
+         47,
+         50
+       ],
+       "Key Information Extraction": [
+         166,
+         200
+       ],
+       "Non-Semantic Text Recognition": [
+         43,
+         50
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         45,
+         100
+       ]
+     },
+     "MathVision": {
+       "acc": 16.02,
+       "acc_stderr": 0,
+       "accuracy": 16.02
+     },
+     "CII-Bench": {
+       "acc": 53.99,
+       "acc_stderr": 0,
+       "accuracy": 53.99,
+       "domain_score": {
+         "Art": 49.26,
+         "CTC": 49.63,
+         "Env.": 68.52,
+         "Life": 50.65,
+         "Society": 61.08,
+         "Politics": 50.0
+       },
+       "emotion_score": {
+         "Neutral": 54.14,
+         "Negative": 55.47,
+         "Positive": 52.14
+       }
+     },
+     "Blink": {
+       "acc": 44.66,
+       "acc_stderr": 0,
+       "Jigsaw": 50.0,
+       "IQ Test": 32.0,
+       "Counting": 60.83,
+       "accuracy": 44.66,
+       "Art Style": 46.15,
+       "Relative Depth": 62.9,
+       "Spatial Relation": 74.13,
+       "Visual Similarity": 54.81,
+       "Forensic Detection": 21.21,
+       "Object Localization": 63.11,
+       "Multi-view Reasoning": 60.9,
+       "Relative Reflectance": 41.79,
+       "Visual Correspondence": 25.58,
+       "Semantic Correspondence": 23.02,
+       "Functional Correspondence": 17.69
+     }
+   },
+   "versions": {},
+   "config_tasks": {},
+   "summary_tasks": {},
+   "summary_general": {}
+ }
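
For anyone reusing these numbers, here is a minimal sketch of loading the added file and listing each benchmark's headline score. It assumes only the standard library and the file path added in this commit; every benchmark entry in "results" carries a top-level "acc" field.

```python
import json

# Load the results file added in this commit.
with open("glm-4v/results_2025-01-17T15-45-24.560866.json") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])
for benchmark, scores in data["results"].items():
    # "acc" holds the headline accuracy for each benchmark (e.g. ChartQA: 35.08).
    print(f"{benchmark:>20}: {scores['acc']:.2f}")
```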