daiteng01 committed · verified · Commit 4c464f8 · 1 parent: b6663e0

Delete gemini-2.0-pro-exp-02-05

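Both deleted files share the same layout: a `config_general` block identifying the model, and a `results` map from benchmark name to its metrics, where every benchmark carries `acc` and `acc_stderr` and most also carry `reject_info` and per-category breakdowns. A minimal sketch of reading one file back (assuming it has been restored locally, e.g. from parent revision b6663e0; the path is the one shown in this commit):

```python
import json

# Minimal sketch: load one of the deleted result files and print the
# headline score per benchmark. Assumes only the schema visible in the
# diffs below.
path = "gemini-2.0-pro-exp-02-05/results_2025-01-25T10-42-53.190540.json"
with open(path) as f:
    report = json.load(f)

print("model:", report["config_general"]["model_name"])
for benchmark, metrics in sorted(report["results"].items()):
    print(f"{benchmark}: acc={metrics['acc']}")
```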
gemini-2.0-pro-exp-02-05/results_2025-01-25T10-42-53.190540.json DELETED
@@ -1,758 +0,0 @@
- {
-   "config_general": {
-     "model_name": "gemini-2.0-pro-exp-02-05",
-     "model_dtype": "float16",
-     "model_size": 0
-   },
-   "results": {
-     "CMMMU": {
-       "艺术与设计": {
-         "num": 88,
-         "correct": 69,
-         "accuracy": 78.41
-       },
-       "overall": {
-         "num": 900,
-         "correct": 550,
-         "accuracy": 61.11
-       },
-       "商业": {
-         "num": 126,
-         "correct": 53,
-         "accuracy": 42.06
-       },
-       "科学": {
-         "num": 204,
-         "correct": 114,
-         "accuracy": 55.88
-       },
-       "健康与医学": {
-         "num": 153,
-         "correct": 103,
-         "accuracy": 67.32
-       },
-       "人文社会科学": {
-         "num": 85,
-         "correct": 63,
-         "accuracy": 74.12
-       },
-       "技术与工程": {
-         "num": 244,
-         "correct": 148,
-         "accuracy": 60.66
-       },
-       "accuracy": 61.11,
-       "acc_stderr": 0,
-       "acc": 61.11
-     },
-     "MMMU": {
-       "accuracy": 62.89,
-       "subject_score": {
-         "Accounting": 50.0,
-         "Agriculture": 66.67,
-         "Architecture": 36.67,
-         "Art": 88.33,
-         "Basic": 73.33,
-         "Biology": 56.67,
-         "Chemistry": 56.67,
-         "Clinical": 66.67,
-         "Computer": 56.67,
-         "Design": 86.67,
-         "Diagnostics": 46.67,
-         "Economics": 66.67,
-         "Electronics": 36.67,
-         "Energy": 50.0,
-         "Finance": 30.0,
-         "Geography": 63.33,
-         "History": 80.0,
-         "Literature": 90.0,
-         "Manage": 63.33,
-         "Marketing": 60.0,
-         "Materials": 53.33,
-         "Math": 53.33,
-         "Mechanical": 46.67,
-         "Music": 36.67,
-         "Pharmacy": 86.67,
-         "Physics": 76.67,
-         "Psychology": 73.33,
-         "Public": 73.33,
-         "Sociology": 73.33
-       },
-       "difficulty_score": {
-         "Medium": 62.26,
-         "Easy": 72.2,
-         "Hard": 49.17
-       },
-       "acc_stderr": 0,
-       "acc": 62.89
-     },
-     "MMMU_Pro_standard": {
-       "accuracy": 45.9,
-       "subject_score": {
-         "History": 55.36,
-         "Art": 66.04,
-         "Design": 66.67,
-         "Literature": 59.62,
-         "Agriculture": 35.0,
-         "Finance": 28.33,
-         "Sociology": 59.26,
-         "Accounting": 36.21,
-         "Energy_and_Power": 34.48,
-         "Pharmacy": 49.12,
-         "Architecture_and_Engineering": 36.67,
-         "Clinical_Medicine": 52.54,
-         "Public_Health": 46.55,
-         "Physics": 40.0,
-         "Art_Theory": 69.09,
-         "Electronics": 51.67,
-         "Psychology": 50.0,
-         "Biology": 44.07,
-         "Manage": 40.0,
-         "Economics": 44.07,
-         "Mechanical_Engineering": 33.9,
-         "Diagnostics_and_Laboratory_Medicine": 43.33,
-         "Basic_Medical_Science": 57.69,
-         "Computer_Science": 46.67,
-         "Math": 46.67,
-         "Music": 31.67,
-         "Materials": 21.67,
-         "Marketing": 45.76,
-         "Chemistry": 51.67,
-         "Geography": 40.38
-       },
-       "difficulty_score": {
-         "Medium": 45.44,
-         "Easy": 56.82,
-         "Hard": 32.42
-       },
-       "acc_stderr": 0,
-       "acc": 45.9
-     },
-     "MMMU_Pro_vision": {
-       "reject_info": {
-         "reject_rate": 0.06,
-         "reject_number": 1,
-         "total_question": 1730
-       },
-       "accuracy": 40.54,
-       "subject_score": {
-         "History": 50.0,
-         "Art": 57.69,
-         "Design": 70.0,
-         "Literature": 67.31,
-         "Agriculture": 31.67,
-         "Finance": 20.0,
-         "Sociology": 55.56,
-         "Accounting": 32.76,
-         "Energy_and_Power": 18.97,
-         "Pharmacy": 49.12,
-         "Architecture_and_Engineering": 31.67,
-         "Clinical_Medicine": 44.07,
-         "Public_Health": 25.86,
-         "Physics": 36.67,
-         "Art_Theory": 69.09,
-         "Electronics": 36.67,
-         "Psychology": 38.33,
-         "Biology": 40.68,
-         "Manage": 50.0,
-         "Economics": 40.68,
-         "Mechanical_Engineering": 28.81,
-         "Diagnostics_and_Laboratory_Medicine": 35.0,
-         "Basic_Medical_Science": 57.69,
-         "Computer_Science": 38.33,
-         "Math": 33.33,
-         "Music": 30.0,
-         "Materials": 25.0,
-         "Marketing": 30.51,
-         "Chemistry": 41.67,
-         "Geography": 42.31
-       },
-       "acc_stderr": 0,
-       "acc": 40.54
-     },
-     "MmvetV2": {
-       "reject_info": {
-         "reject_rate": 1.93,
-         "reject_number": 10,
-         "total_question": 517
-       },
-       "accuracy": 72.1893,
-       "capability_scores": {
-         "math": 87.87878787878788,
-         "ocr": 78.57843137254909,
-         "spat": 67.62886597938139,
-         "rec": 68.85856079404473,
-         "know": 70.26490066225163,
-         "gen": 72.23048327137552,
-         "seq": 70.55555555555556
-       },
-       "capability_detail_scores": {
-         "math_ocr": 89.0909090909091,
-         "math_spat_ocr": 86.66666666666667,
-         "spat_math_rec_ocr": 100.0,
-         "spat_rec": 57.407407407407405,
-         "spat_ocr": 82.6923076923077,
-         "spat_rec_ocr": 56.666666666666664,
-         "know_spat_ocr": 100.0,
-         "rec_ocr": 87.5,
-         "spat_know_rec": 45.0,
-         "ocr": 86.45161290322581,
-         "rec": 70.34482758620689,
-         "know_rec": 73.07692307692307,
-         "know_rec_gen": 71.7525773195876,
-         "know_gen_rec_ocr": 77.6923076923077,
-         "spat_gen_rec_ocr": 75.71428571428574,
-         "gen_spat_ocr": 90.0,
-         "spat_ocr_gen_seq_math": 80.0,
-         "rec_spat_ocr_seq_math": 100.0,
-         "spat_rec_gen": 55.90909090909091,
-         "gen_math_spat_ocr": 40.0,
-         "spat_seq_rec": 51.42857142857144,
-         "spat_seq_rec_ocr": 43.333333333333336,
-         "spat_know_rec_gen": 73.33333333333333,
-         "rec_gen": 70.29411764705883,
-         "spat_know_rec_ocr": 0.0,
-         "know_ocr_rec_gen": 77.6923076923077,
-         "rec_spat_ocr_gen_know": 75.0,
-         "math_rec_ocr": 100.0,
-         "gen_rec_ocr": 83.99999999999999,
-         "seq_gen_rec_ocr": 82.85714285714285,
-         "gen_ocr": 80.0,
-         "seq_rec_gen": 68.57142857142858,
-         "seq_rec": 68.33333333333333,
-         "spat_seq_rec_gen": 80.0,
-         "seq_know_rec": 100.0,
-         "seq_know_rec_gen": 85.00000000000001,
-         "rec_spat_ocr_gen_seq": 40.0,
-         "rec_know_ocr_gen_seq": 100.0,
-         "know_math_rec": 100.0,
-         "seq_rec_ocr": 100.0
-       },
-       "acc_stderr": 0,
-       "acc": 72.1893
-     },
-     "MathVerse": {
-       "reject_info": {
-         "reject_rate": 28.32,
-         "reject_number": 1116,
-         "total_question": 3940
-       },
-       "Text Dominant": {
-         "accuracy": 58.4,
-         "correct": 351,
-         "total": 601
-       },
-       "Total": {
-         "accuracy": 50.57,
-         "correct": 1428,
-         "total": 2824
-       },
-       "Vision Intensive": {
-         "accuracy": 49.06,
-         "correct": 286,
-         "total": 583
-       },
-       "Text Lite": {
-         "accuracy": 54.16,
-         "correct": 319,
-         "total": 589
-       },
-       "Vision Dominant": {
-         "accuracy": 48.35,
-         "correct": 278,
-         "total": 575
-       },
-       "Vision Only": {
-         "accuracy": 40.76,
-         "correct": 194,
-         "total": 476
-       },
-       "accuracy": 50.57,
-       "acc_stderr": 0,
-       "acc": 50.57
-     },
-     "Ocrlite": {
-       "reject_info": {
-         "reject_rate": 2.55,
-         "reject_number": 42,
-         "total_question": 1644
-       },
-       "final_score": [
-         1326,
-         1602
-       ],
-       "accuracy": 82.772,
-       "Key Information Extraction-Bookshelf": [
-         42,
-         49,
-         0.857,
-         {
-           "Default": [
-             42,
-             49,
-             0.857
-           ]
-         }
-       ],
-       "Scene Text-centric VQA-diet_constraints": [
-         77,
-         89,
-         0.865,
-         {
-           "Default": [
-             77,
-             89,
-             0.865
-           ]
-         }
-       ],
-       "Doc-oriented VQA-Control": [
-         146,
-         186,
-         0.785,
-         {
-           "Default": [
-             146,
-             186,
-             0.785
-           ]
-         }
-       ],
-       "Doc-oriented VQA": [
-         169,
-         199,
-         0.849,
-         {
-           "Default": [
-             169,
-             199,
-             0.849
-           ]
-         }
-       ],
-       "Scene Text-centric VQA-Fake_logo": [
-         82,
-         119,
-         0.689,
-         {
-           "Default": [
-             82,
-             119,
-             0.689
-           ]
-         }
-       ],
-       "Handwritten Mathematical Expression Recognition": [
-         1,
-         73,
-         0.014,
-         {
-           "Default": [
-             1,
-             73,
-             0.014
-           ]
-         }
-       ],
-       "Key Information Extraction": [
-         186,
-         206,
-         0.903,
-         {
-           "Default": [
-             186,
-             206,
-             0.903
-           ]
-         }
-       ],
-       "Scene Text-centric VQA-Control": [
-         181,
-         200,
-         0.905,
-         {
-           "Default": [
-             181,
-             200,
-             0.905
-           ]
-         }
-       ],
-       "Scene Text-centric VQA": [
-         258,
-         281,
-         0.918,
-         {
-           "Default": [
-             258,
-             281,
-             0.918
-           ]
-         }
-       ],
-       "Artistic Text Recognition": [
-         43,
-         50,
-         0.86,
-         {
-           "Default": [
-             43,
-             50,
-             0.86
-           ]
-         }
-       ],
-       "Irregular Text Recognition": [
-         45,
-         50,
-         0.9,
-         {
-           "Default": [
-             45,
-             50,
-             0.9
-           ]
-         }
-       ],
-       "Non-Semantic Text Recognition": [
-         46,
-         50,
-         0.92,
-         {
-           "Default": [
-             46,
-             50,
-             0.92
-           ]
-         }
-       ],
-       "Regular Text Recognition": [
-         50,
-         50,
-         1.0,
-         {
-           "Default": [
-             50,
-             50,
-             1.0
-           ]
-         }
-       ],
-       "acc_stderr": 0,
-       "acc": 82.772
-     },
-     "OcrliteZh": {
-       "reject_info": {
-         "reject_rate": 2.56,
-         "reject_number": 6,
-         "total_question": 234
-       },
-       "final_score": [
-         168,
-         228
-       ],
-       "accuracy": 73.684,
-       "Docvqa": [
-         8,
-         9,
-         0.889,
-         {
-           "Default": [
-             8,
-             9,
-             0.889
-           ]
-         }
-       ],
-       "Chartqa-human": [
-         6,
-         9,
-         0.667,
-         {
-           "Default": [
-             6,
-             9,
-             0.667
-           ]
-         }
-       ],
-       "Chartqa-au": [
-         8,
-         9,
-         0.889,
-         {
-           "Default": [
-             8,
-             9,
-             0.889
-           ]
-         }
-       ],
-       "infographic": [
-         6,
-         10,
-         0.6,
-         {
-           "Default": [
-             6,
-             10,
-             0.6
-           ]
-         }
-       ],
-       "Key Information Extraction": [
-         43,
-         44,
-         0.977,
-         {
-           "Default": [
-             43,
-             44,
-             0.977
-           ]
-         }
-       ],
-       "Scene Text-centric VQA": [
-         32,
-         39,
-         0.821,
-         {
-           "Default": [
-             32,
-             39,
-             0.821
-           ]
-         }
-       ],
-       "Artistic Text Recognition": [
-         5,
-         11,
-         0.455,
-         {
-           "Default": [
-             5,
-             11,
-             0.455
-           ]
-         }
-       ],
-       "IrRegular Text Recognition": [
-         6,
-         11,
-         0.545,
-         {
-           "Default": [
-             6,
-             11,
-             0.545
-           ]
-         }
-       ],
-       "Non-semantic Text Recognition": [
-         8,
-         12,
-         0.667,
-         {
-           "Default": [
-             8,
-             12,
-             0.667
-           ]
-         }
-       ],
-       "Regular Text Recognition": [
-         9,
-         11,
-         0.818,
-         {
-           "Default": [
-             9,
-             11,
-             0.818
-           ]
-         }
-       ],
-       "Handwriting_CN": [
-         12,
-         20,
-         0.6,
-         {
-           "Default": [
-             12,
-             20,
-             0.6
-           ]
-         }
-       ],
-       "Chinese Unlimited": [
-         25,
-         43,
-         0.581,
-         {
-           "Default": [
-             25,
-             43,
-             0.581
-           ]
-         }
-       ],
-       "acc_stderr": 0,
-       "acc": 73.684
-     },
-     "CharXiv": {
-       "descriptive": {
-         "Overall Score": 83.12,
-         "By Question": {
-           "Q1": 81.97,
-           "Q2": 82.17,
-           "Q3": 75.54,
-           "Q4": 88.72,
-           "Q5": 85.36,
-           "Q6": 80.32,
-           "Q7": 77.35,
-           "Q8": 87.95,
-           "Q9": 82.09,
-           "Q10": 86.99,
-           "Q11": 69.71,
-           "Q12": 84.62,
-           "Q13": 85.39,
-           "Q14": 88.3,
-           "Q15": 92.01,
-           "Q16": 86.11,
-           "Q17": 58.93,
-           "Q18": 94.74,
-           "Q19": 93.85
-         },
-         "By Category": {
-           "Information Extraction": 81.73,
-           "Enumeration": 87.65,
-           "Pattern Recognition": 84.5,
-           "Counting": 87.02,
-           "Compositionality": 58.93
-         },
-         "By Subplot": {
-           "1 Subplot": 86.46,
-           "2-4 Subplots": 83.8,
-           "5+ Subplots": 76.59
-         },
-         "By Subject": {
-           "Computer Science": 84.13,
-           "Economics": 85.87,
-           "Electrical Engineering and Systems Science": 88.03,
-           "Mathematics": 85.74,
-           "Physics": 79.53,
-           "Quantitative Biology": 76.79,
-           "Quantitative Finance": 80.82,
-           "Statistics": 83.85
-         },
-         "By Year": {
-           "2020": 82.09,
-           "2021": 81.51,
-           "2022": 84.94,
-           "2023": 84.07
-         },
-         "N_valid": 4000,
-         "N_invalid": 0,
-         "Question Type": "Descriptive"
-       },
-       "reasoning": {
-         "Overall Score": 51.0,
-         "By Answer Type": {
-           "Text-in-Chart": 54.55,
-           "Text-in-General": 43.43,
-           "Number-in-Chart": 56.9,
-           "Number-in-General": 41.48
-         },
-         "By Source": {
-           "GPT-Sourced": 59.24,
-           "GPT-Inspired": 48.61,
-           "Completely Human": 49.33
-         },
-         "By Subject": {
-           "Computer Science": 51.59,
-           "Economics": 44.2,
-           "Electrical Engineering and Systems Science": 52.94,
-           "Mathematics": 58.52,
-           "Physics": 54.33,
-           "Quantitative Biology": 51.59,
-           "Quantitative Finance": 47.41,
-           "Statistics": 46.9
-         },
-         "By Year": {
-           "2020": 48.58,
-           "2021": 50.57,
-           "2022": 52.46,
-           "2023": 52.42
-         },
-         "By Subplot": {
-           "1 Subplot": 55.7,
-           "2-4 Subplots": 45.24,
-           "5+ Subplots": 52.54
-         },
-         "N_valid": 1000,
-         "N_invalid": 0,
-         "Question Type": "Reasoning"
-       },
-       "accuracy": 67.06,
-       "acc_stderr": 0,
-       "acc": 67.06
-     },
-     "MathVision": {
-       "reject_info": {
-         "reject_rate": 0.07,
-         "reject_number": 2,
-         "total_question": 3040
-       },
-       "accuracy": 53.79,
-       "acc_stderr": 0,
-       "acc": 53.79
-     },
-     "CII-Bench": {
-       "reject_info": {
-         "reject_rate": 0.39,
-         "reject_number": 3,
-         "total_question": 765
-       },
-       "accuracy": 67.72,
-       "domain_score": {
-         "Life": 70.56,
-         "Art": 70.37,
-         "CTC": 57.46,
-         "Society": 69.02,
-         "Env.": 66.67,
-         "Politics": 75.0
-       },
-       "emotion_score": {
-         "Neutral": 71.05,
-         "Negative": 65.78,
-         "Positive": 66.09
-       },
-       "acc_stderr": 0,
-       "acc": 67.72
-     },
-     "Blink": {
-       "reject_info": {
-         "reject_rate": 0.16,
-         "reject_number": 3,
-         "total_question": 1901
-       },
-       "accuracy": 64.7,
-       "Art Style": 87.18,
-       "Counting": 70.0,
-       "Forensic Detection": 78.79,
-       "Functional Correspondence": 53.08,
-       "IQ Test": 21.33,
-       "Jigsaw": 57.72,
-       "Multi-view Reasoning": 59.4,
-       "Object Localization": 60.66,
-       "Relative Depth": 84.68,
-       "Relative Reflectance": 33.83,
-       "Semantic Correspondence": 63.31,
-       "Spatial Relation": 74.65,
-       "Visual Correspondence": 83.72,
-       "Visual Similarity": 81.48,
-       "acc_stderr": 0,
-       "acc": 64.7
-     }
-   }
- }
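The derived fields in this snapshot are internally consistent. A few spot checks, with the formulas inferred from the numbers themselves rather than taken from the evaluation harness:

```python
# Spot checks on derived fields, using only values visible in the diff
# above. The formulas are inferred from the data, not from the harness.

def pct(correct: int, total: int, digits: int = 2) -> float:
    return round(100 * correct / total, digits)

assert pct(550, 900) == 61.11        # CMMMU overall: accuracy = correct / num
assert pct(10, 517) == 1.93          # MmvetV2: reject_rate = reject_number / total_question
assert pct(1326, 1602, 3) == 82.772  # Ocrlite: accuracy = final_score[0] / final_score[1]
assert round((83.12 + 51.0) / 2, 2) == 67.06  # CharXiv: overall = mean(descriptive, reasoning)
```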
gemini-2.0-pro-exp-02-05/results_2025-03-10T11-01-14.184463.json DELETED
@@ -1,406 +0,0 @@
- {
-   "config_general": {
-     "model_name": "gemini-2.0-pro-exp-02-05",
-     "model_dtype": "float16",
-     "model_size": 0
-   },
-   "results": {
-     "ChartQA": {
-       "acc": 21.82,
-       "acc_stderr": 0,
-       "accuracy": 21.82,
-       "human_test": {
-         "total": 1250,
-         "correct": 234,
-         "accuracy": 18.72
-       },
-       "reject_info": {
-         "reject_rate": 0.08,
-         "reject_number": 2,
-         "total_question": 2500
-       },
-       "augmented_test": {
-         "total": 1248,
-         "correct": 311,
-         "accuracy": 24.92
-       }
-     },
-     "CMMMU": {
-       "acc": 61.11,
-       "acc_stderr": 0,
-       "商业": {
-         "num": 126,
-         "correct": 53,
-         "accuracy": 42.06
-       },
-       "科学": {
-         "num": 204,
-         "correct": 114,
-         "accuracy": 55.88
-       },
-       "overall": {
-         "num": 900,
-         "correct": 550,
-         "accuracy": 61.11
-       },
-       "accuracy": 61.11,
-       "健康与医学": {
-         "num": 153,
-         "correct": 103,
-         "accuracy": 67.32
-       },
-       "技术与工程": {
-         "num": 244,
-         "correct": 148,
-         "accuracy": 60.66
-       },
-       "艺术与设计": {
-         "num": 88,
-         "correct": 69,
-         "accuracy": 78.41
-       },
-       "人文社会科学": {
-         "num": 85,
-         "correct": 63,
-         "accuracy": 74.12
-       }
-     },
-     "CMMU": {
-       "acc": 53.71,
-       "acc_stderr": 0,
-       "val": {
-         "multiple-choice": {
-           "hard": {
-             "total": 150,
-             "correct": 68,
-             "accuracy": 45.33
-           },
-           "normal": {
-             "total": 1205,
-             "correct": 668,
-             "accuracy": 55.44
-           }
-         },
-         "fill-in-the-blank": {
-           "hard": {
-             "total": 300,
-             "correct": 156,
-             "accuracy": 52.0
-           },
-           "normal": {
-             "total": 506,
-             "correct": 267,
-             "accuracy": 52.77
-           }
-         },
-         "multiple-response": {
-           "hard": {
-             "total": 94,
-             "correct": 48,
-             "accuracy": 51.06
-           },
-           "normal": {
-             "total": 33,
-             "correct": 16,
-             "accuracy": 48.48
-           }
-         }
-       },
-       "test": {
-         "multiple-choice": {
-           "hard": {
-             "total": 150,
-             "correct": 71,
-             "accuracy": 47.33
-           },
-           "normal": {
-             "total": 1205,
-             "correct": 696,
-             "accuracy": 57.76
-           }
-         },
-         "fill-in-the-blank": {
-           "hard": {
-             "total": 296,
-             "correct": 142,
-             "accuracy": 47.97
-           },
-           "normal": {
-             "total": 529,
-             "correct": 277,
-             "accuracy": 52.36
-           }
-         },
-         "multiple-response": {
-           "hard": {
-             "total": 95,
-             "correct": 40,
-             "accuracy": 42.11
-           },
-           "normal": {
-             "total": 32,
-             "correct": 13,
-             "accuracy": 40.62
-           }
-         }
-       },
-       "reject_info": {
-         "reject_rate": 0.01,
-         "reject_number": 1,
-         "total_question": 12705
-       },
-       "val-overall": {
-         "total": 2288,
-         "correct": 1223,
-         "accuracy": 53.45,
-         "bias_rate": 9.6
-       },
-       "test-overall": {
-         "total": 2307,
-         "correct": 1239,
-         "accuracy": 53.71,
-         "bias_rate": 3.96
-       }
-     },
-     "MMMU": {
-       "acc": 62.89,
-       "acc_stderr": 0,
-       "accuracy": 62.89,
-       "subject_score": {
-         "Art": 88.33,
-         "Math": 53.33,
-         "Basic": 73.33,
-         "Music": 36.67,
-         "Design": 86.67,
-         "Energy": 50.0,
-         "Manage": 63.33,
-         "Public": 73.33,
-         "Biology": 56.67,
-         "Finance": 30.0,
-         "History": 80.0,
-         "Physics": 76.67,
-         "Clinical": 66.67,
-         "Computer": 56.67,
-         "Pharmacy": 86.67,
-         "Chemistry": 56.67,
-         "Economics": 66.67,
-         "Geography": 63.33,
-         "Marketing": 60.0,
-         "Materials": 53.33,
-         "Sociology": 73.33,
-         "Accounting": 50.0,
-         "Literature": 90.0,
-         "Mechanical": 46.67,
-         "Psychology": 73.33,
-         "Agriculture": 66.67,
-         "Diagnostics": 46.67,
-         "Electronics": 36.67,
-         "Architecture": 36.67
-       },
-       "difficulty_score": {
-         "Easy": 72.2,
-         "Hard": 49.17,
-         "Medium": 62.26
-       }
-     },
-     "MMMU_Pro_standard": {
-       "acc": 45.9,
-       "acc_stderr": 0,
-       "accuracy": 45.9,
-       "subject_score": {
-         "Art": 66.04,
-         "Math": 46.67,
-         "Music": 31.67,
-         "Design": 66.67,
-         "Manage": 40.0,
-         "Biology": 44.07,
-         "Finance": 28.33,
-         "History": 55.36,
-         "Physics": 40.0,
-         "Pharmacy": 49.12,
-         "Chemistry": 51.67,
-         "Economics": 44.07,
-         "Geography": 40.38,
-         "Marketing": 45.76,
-         "Materials": 21.67,
-         "Sociology": 59.26,
-         "Accounting": 36.21,
-         "Art_Theory": 69.09,
-         "Literature": 59.62,
-         "Psychology": 50.0,
-         "Agriculture": 35.0,
-         "Electronics": 51.67,
-         "Public_Health": 46.55,
-         "Computer_Science": 46.67,
-         "Energy_and_Power": 34.48,
-         "Clinical_Medicine": 52.54,
-         "Basic_Medical_Science": 57.69,
-         "Mechanical_Engineering": 33.9,
-         "Architecture_and_Engineering": 36.67,
-         "Diagnostics_and_Laboratory_Medicine": 43.33
-       },
-       "difficulty_score": {
-         "Easy": 56.82,
-         "Hard": 32.42,
-         "Medium": 45.44
-       }
-     },
-     "MMMU_Pro_vision": {
-       "acc": 40.54,
-       "acc_stderr": 0,
-       "accuracy": 40.54,
-       "reject_info": {
-         "reject_rate": 0.06,
-         "reject_number": 1,
-         "total_question": 1730
-       },
-       "subject_score": {
-         "Art": 57.69,
-         "Math": 33.33,
-         "Music": 30.0,
-         "Design": 70.0,
-         "Manage": 50.0,
-         "Biology": 40.68,
-         "Finance": 20.0,
-         "History": 50.0,
-         "Physics": 36.67,
-         "Pharmacy": 49.12,
-         "Chemistry": 41.67,
-         "Economics": 40.68,
-         "Geography": 42.31,
-         "Marketing": 30.51,
-         "Materials": 25.0,
-         "Sociology": 55.56,
-         "Accounting": 32.76,
-         "Art_Theory": 69.09,
-         "Literature": 67.31,
-         "Psychology": 38.33,
-         "Agriculture": 31.67,
-         "Electronics": 36.67,
-         "Public_Health": 25.86,
-         "Computer_Science": 38.33,
-         "Energy_and_Power": 18.97,
-         "Clinical_Medicine": 44.07,
-         "Basic_Medical_Science": 57.69,
-         "Mechanical_Engineering": 28.81,
-         "Architecture_and_Engineering": 31.67,
-         "Diagnostics_and_Laboratory_Medicine": 35.0
-       }
-     },
-     "OCRBench": {
-       "acc": 86.186,
-       "acc_stderr": 0,
-       "accuracy": 86.186,
-       "final_score": [
-         861,
-         999
-       ],
-       "reject_info": {
-         "reject_rate": 0.1,
-         "reject_number": 1,
-         "total_question": 1000
-       },
-       "Doc-oriented VQA": [
-         175,
-         200
-       ],
-       "Scene Text-centric VQA": [
-         187,
-         200
-       ],
-       "Handwriting Recognition": [
-         38,
-         50
-       ],
-       "Digit String Recognition": [
-         39,
-         50
-       ],
-       "Regular Text Recognition": [
-         49,
-         50
-       ],
-       "Artistic Text Recognition": [
-         47,
-         50
-       ],
-       "Irregular Text Recognition": [
-         47,
-         50
-       ],
-       "Key Information Extraction": [
-         186,
-         200
-       ],
-       "Non-Semantic Text Recognition": [
-         50,
-         50
-       ],
-       "Handwritten Mathematical Expression Recognition": [
-         43,
-         99
-       ]
-     },
-     "MathVision": {
-       "acc": 53.79,
-       "acc_stderr": 0,
-       "accuracy": 53.79,
-       "reject_info": {
-         "reject_rate": 0.07,
-         "reject_number": 2,
-         "total_question": 3040
-       }
-     },
-     "CII-Bench": {
-       "acc": 67.72,
-       "acc_stderr": 0,
-       "accuracy": 67.72,
-       "reject_info": {
-         "reject_rate": 0.39,
-         "reject_number": 3,
-         "total_question": 765
-       },
-       "domain_score": {
-         "Art": 70.37,
-         "CTC": 57.46,
-         "Env.": 66.67,
-         "Life": 70.56,
-         "Society": 69.02,
-         "Politics": 75.0
-       },
-       "emotion_score": {
-         "Neutral": 71.05,
-         "Negative": 65.78,
-         "Positive": 66.09
-       }
-     },
-     "Blink": {
-       "acc": 64.7,
-       "acc_stderr": 0,
-       "Jigsaw": 57.72,
-       "IQ Test": 21.33,
-       "Counting": 70.0,
-       "accuracy": 64.7,
-       "Art Style": 87.18,
-       "reject_info": {
-         "reject_rate": 0.16,
-         "reject_number": 3,
-         "total_question": 1901
-       },
-       "Relative Depth": 84.68,
-       "Spatial Relation": 74.65,
-       "Visual Similarity": 81.48,
-       "Forensic Detection": 78.79,
-       "Object Localization": 60.66,
-       "Multi-view Reasoning": 59.4,
-       "Relative Reflectance": 33.83,
-       "Visual Correspondence": 83.72,
-       "Semantic Correspondence": 63.31,
-       "Functional Correspondence": 53.08
-     }
-   },
-   "versions": {},
-   "config_tasks": {},
-   "summary_tasks": {},
-   "summary_general": {}
- }
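The two snapshots overlap on CMMMU, MMMU, both MMMU_Pro splits, MathVision, CII-Bench and Blink; the earlier one additionally covers MmvetV2, MathVerse, Ocrlite, OcrliteZh and CharXiv, while this one adds ChartQA, CMMU and OCRBench. With both files restored locally, the coverage difference can be listed directly (a sketch under the same assumption as above):

```python
import json

# Sketch: compare benchmark coverage across the two deleted snapshots,
# assuming both files have been restored locally from the parent revision.
paths = [
    "gemini-2.0-pro-exp-02-05/results_2025-01-25T10-42-53.190540.json",
    "gemini-2.0-pro-exp-02-05/results_2025-03-10T11-01-14.184463.json",
]
first, second = (set(json.load(open(p))["results"]) for p in paths)
print("only in first: ", sorted(first - second))  # MathVerse, MmvetV2, Ocrlite, ...
print("only in second:", sorted(second - first))  # ChartQA, CMMU, OCRBench
print("in both:       ", sorted(first & second))
```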