vlm_results/Aria/results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Aria",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 43.56,
"acc_stderr": 0,
"acc": 43.56
},
"MMMU": {
"accuracy": 48.67,
"acc_stderr": 0,
"acc": 48.67
},
"MMMU_Pro_standard": {
"accuracy": 32.08,
"acc_stderr": 0,
"acc": 32.08
},
"MMMU_Pro_vision": {
"accuracy": 21.21,
"subject_score": {
"History": 25.0,
"Art": 26.42,
"Design": 41.67,
"Literature": 55.77,
"Agriculture": 25.0,
"Finance": 16.67,
"Sociology": 22.22,
"Accounting": 25.86,
"Energy_and_Power": 13.79,
"Pharmacy": 31.58,
"Architecture_and_Engineering": 10.0,
"Clinical_Medicine": 16.95,
"Public_Health": 25.86,
"Physics": 16.67,
"Art_Theory": 21.82,
"Electronics": 13.33,
"Psychology": 25.0,
"Biology": 20.34,
"Manage": 12.0,
"Economics": 22.03,
"Mechanical_Engineering": 20.34,
"Diagnostics_and_Laboratory_Medicine": 10.0,
"Basic_Medical_Science": 23.08,
"Computer_Science": 23.33,
"Math": 23.33,
"Music": 20.0,
"Materials": 8.33,
"Marketing": 15.25,
"Chemistry": 15.0,
"Geography": 13.46
},
"acc_stderr": 0,
"acc": 21.21
},
"MmvetV2": {
"accuracy": 55.8414,
"capability_scores": {
"math": 45.0,
"ocr": 60.673076923076906,
"spat": 50.65989847715736,
"rec": 54.63592233009711,
"know": 51.602564102564095,
"gen": 56.363636363636346,
"seq": 59.821428571428584
},
"capability_detail_scores": {
"math_ocr": 59.09090909090909,
"math_ocr_spat": 39.33333333333333,
"math_rec_ocr_spat": 50.0,
"rec_spat": 46.78571428571429,
"ocr_spat": 52.69230769230769,
"rec_ocr_spat": 31.666666666666664,
"know_ocr_spat": 100.0,
"rec_ocr": 57.49999999999999,
"rec_know_spat": 35.0,
"ocr": 75.3125,
"rec": 63.55932203389829,
"rec_know": 43.84615384615384,
"rec_know_gen": 50.499999999999986,
"rec_know_ocr_gen": 69.23076923076921,
"rec_spat_ocr_gen": 64.41860465116278,
"spat_ocr_gen": 70.0,
"math_gen_seq_ocr_spat": 50.0,
"math_rec_seq_ocr_spat": 0.0,
"rec_spat_gen": 36.36363636363637,
"math_gen_ocr_spat": 0.0,
"seq_rec_spat": 45.714285714285715,
"seq_rec_ocr_spat": 63.33333333333333,
"rec_know_spat_gen": 40.0,
"rec_gen": 61.764705882352956,
"ocr_rec_know_spat": 50.0,
"gen_rec_know_ocr_spat": 55.00000000000001,
"math_rec_ocr": 100.0,
"rec_ocr_gen": 65.99999999999999,
"seq_rec_ocr_gen": 72.85714285714285,
"ocr_gen": 57.692307692307686,
"seq_rec_gen": 59.28571428571428,
"seq_rec": 43.333333333333336,
"seq_rec_spat_gen": 77.5,
"seq_rec_know": 50.0,
"seq_rec_know_gen": 40.0,
"gen_rec_seq_ocr_spat": 53.333333333333336,
"gen_rec_seq_know_ocr": 90.0,
"math_rec_know": 20.0,
"seq_rec_ocr": 100.0,
"rec_know_ocr_spat": 50.0
},
"acc_stderr": 0,
"acc": 55.8414
},
"MathVerse": {
"Text Dominant": {
"accuracy": 41.37,
"correct": 326,
"total": 788
},
"Total": {
"accuracy": 33.25,
"correct": 1310,
"total": 3940
},
"Text Lite": {
"accuracy": 34.64,
"correct": 273,
"total": 788
},
"Vision Intensive": {
"accuracy": 33.63,
"correct": 265,
"total": 788
},
"Vision Dominant": {
"accuracy": 32.49,
"correct": 256,
"total": 788
},
"Vision Only": {
"accuracy": 24.11,
"correct": 190,
"total": 788
},
"accuracy": 33.25,
"acc_stderr": 0,
"acc": 33.25
},
"Ocrlite": {
"final_score": [
1089,
1645
],
"accuracy": 66.201,
"Key Information Extraction-Bookshelf": [
19,
52
],
"Scene Text-centric VQA-diet_constraints": [
49,
90
],
"Doc-oriented VQA-Control": [
123,
189
],
"Doc-oriented VQA": [
128,
204
],
"Scene Text-centric VQA-Fake_logo": [
49,
119
],
"Handwritten Mathematical Expression Recognition": [
24,
100
],
"Key Information Extraction": [
166,
209
],
"Scene Text-centric VQA-Control": [
164,
200
],
"Scene Text-centric VQA": [
211,
282
],
"Artistic Text Recognition": [
39,
50
],
"Irregular Text Recognition": [
37,
50
],
"Non-Semantic Text Recognition": [
32,
50
],
"Regular Text Recognition": [
48,
50
],
"acc_stderr": 0,
"acc": 66.201
},
"OcrliteZh": {
"final_score": [
80,
234
],
"accuracy": 34.188,
"Docvqa": [
3,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
3,
10
],
"infographic": [
3,
10
],
"Key Information Extraction": [
29,
45
],
"Scene Text-centric VQA": [
17,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
5,
12
],
"Regular Text Recognition": [
1,
11
],
"Handwriting_CN": [
4,
20
],
"Chinese Unlimited": [
13,
44
],
"acc_stderr": 0,
"acc": 34.188
},
"CharXiv": {
"descriptive": {
"Overall Score": 57.65,
"By Question": {
"Q1": 43.44,
"Q2": 75.65,
"Q3": 65.24,
"Q4": 73.54,
"Q5": 77.41,
"Q6": 63.45,
"Q7": 59.4,
"Q8": 62.95,
"Q9": 53.73,
"Q10": 68.49,
"Q11": 40.57,
"Q12": 58.79,
"Q13": 51.6,
"Q14": 57.45,
"Q15": 41.53,
"Q16": 58.33,
"Q17": 12.5,
"Q18": 70.04,
"Q19": 75.38
},
"By Category": {
"Information Extraction": 65.42,
"Enumeration": 52.78,
"Pattern Recognition": 57.86,
"Counting": 65.14,
"Compositionality": 12.5
},
"By Subplot": {
"1 Subplot": 61.59,
"2-4 Subplots": 57.28,
"5+ Subplots": 51.8
},
"By Subject": {
"Computer Science": 55.56,
"Economics": 60.33,
"Electrical Engineering and Systems Science": 59.87,
"Mathematics": 58.89,
"Physics": 51.77,
"Quantitative Biology": 53.77,
"Quantitative Finance": 57.97,
"Statistics": 63.5
},
"By Year": {
"2020": 57.59,
"2021": 56.61,
"2022": 57.38,
"2023": 59.07
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 28.9,
"By Answer Type": {
"Text-in-Chart": 33.18,
"Text-in-General": 42.42,
"Number-in-Chart": 28.88,
"Number-in-General": 14.85
},
"By Source": {
"GPT-Sourced": 29.89,
"GPT-Inspired": 23.61,
"Completely Human": 30.5
},
"By Subject": {
"Computer Science": 25.4,
"Economics": 26.09,
"Electrical Engineering and Systems Science": 31.09,
"Mathematics": 29.63,
"Physics": 32.28,
"Quantitative Biology": 28.57,
"Quantitative Finance": 22.41,
"Statistics": 36.28
},
"By Year": {
"2020": 27.94,
"2021": 35.63,
"2022": 24.18,
"2023": 27.42
},
"By Subplot": {
"1 Subplot": 28.24,
"2-4 Subplots": 30.16,
"5+ Subplots": 27.97
},
"N_valid": 1000,
"N_invalid": 3,
"Question Type": "Reasoning"
},
"accuracy": 43.27,
"acc_stderr": 0,
"acc": 43.27
},
"MathVision": {
"accuracy": 13.59,
"acc_stderr": 0,
"acc": 13.59
},
"CII-Bench": {
"accuracy": 46.27,
"domain_score": {
"Life": 40.26,
"Art": 45.59,
"CTC": 48.89,
"Society": 46.49,
"Env.": 62.96,
"Politics": 54.17
},
"emotion_score": {
"Neutral": 46.62,
"Negative": 46.04,
"Positive": 46.15
},
"acc_stderr": 0,
"acc": 46.27
},
"Blink": {
"accuracy": 52.18,
"Art Style": 70.09,
"Counting": 58.33,
"Forensic Detection": 58.33,
"Functional Correspondence": 23.08,
"IQ Test": 29.33,
"Jigsaw": 74.0,
"Multi-view Reasoning": 53.38,
"Object Localization": 54.1,
"Relative Depth": 41.94,
"Relative Reflectance": 34.33,
"Semantic Correspondence": 30.22,
"Spatial Relation": 79.02,
"Visual Correspondence": 42.44,
"Visual Similarity": 85.19,
"acc_stderr": 0,
"acc": 52.18
}
}
}
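Usage note (not part of the uploaded file): the sketch below shows one way this results JSON might be read and summarized, assuming it has been downloaded locally to the path shown in the header; the path constant and the summary loop are illustrative, not part of the dataset. Each benchmark entry carries a top-level "accuracy" field, while the finer-grained splits (per subject, per capability, per question type) vary by benchmark, and the "final_score" pairs in Ocrlite/OcrliteZh appear to be [correct, total] counts (e.g. 1089 / 1645 * 100 ≈ 66.20, matching the reported 66.201).

import json

# Assumed local copy of the file shown above; adjust as needed.
RESULTS_PATH = "vlm_results/Aria/results_2025-01-25T10-42-53.190540.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

cfg = data["config_general"]
print(f"Model: {cfg['model_name']} ({cfg['model_dtype']})")

for benchmark, scores in data["results"].items():
    # Every benchmark entry exposes a top-level "accuracy" value in percent.
    print(f"{benchmark:20s} {scores['accuracy']:6.2f}")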