vlm_results / LLaVA-Onevision-72B / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "LLaVA-Onevision-72B",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 66,
"accuracy": 75.0
},
"overall": {
"num": 900,
"correct": 430,
"accuracy": 47.78
},
"商业": {
"num": 126,
"correct": 35,
"accuracy": 27.78
},
"科学": {
"num": 204,
"correct": 80,
"accuracy": 39.22
},
"健康与医学": {
"num": 153,
"correct": 86,
"accuracy": 56.21
},
"人文社会科学": {
"num": 85,
"correct": 50,
"accuracy": 58.82
},
"技术与工程": {
"num": 244,
"correct": 113,
"accuracy": 46.31
},
"accuracy": 47.78,
"acc_stderr": 0,
"acc": 47.78
},
"MMMU": {
"accuracy": 56.0,
"acc_stderr": 0,
"acc": 56.0
},
"MMMU_Pro_standard": {
"reject_info": {
"reject_rate": 0.12,
"reject_number": 2,
"total_question": 1730
},
"accuracy": 37.21,
"acc_stderr": 0,
"acc": 37.21
},
"MMMU_Pro_vision": {
"reject_info": {
"reject_rate": 0.12,
"reject_number": 2,
"total_question": 1730
},
"accuracy": 23.9,
"subject_score": {
"History": 26.79,
"Literature": 55.77,
"Sociology": 35.19,
"Art": 28.3,
"Agriculture": 16.67,
"Design": 35.0,
"Pharmacy": 38.6,
"Energy_and_Power": 15.52,
"Architecture_and_Engineering": 11.67,
"Art_Theory": 38.18,
"Electronics": 23.33,
"Accounting": 21.43,
"Psychology": 16.67,
"Biology": 16.95,
"Manage": 20.0,
"Public_Health": 17.24,
"Economics": 28.81,
"Diagnostics_and_Laboratory_Medicine": 11.67,
"Clinical_Medicine": 18.64,
"Physics": 18.33,
"Mechanical_Engineering": 25.42,
"Finance": 26.67,
"Computer_Science": 30.0,
"Math": 20.0,
"Basic_Medical_Science": 17.31,
"Marketing": 22.03,
"Music": 25.0,
"Materials": 21.67,
"Chemistry": 16.67,
"Geography": 23.08
},
"acc_stderr": 0,
"acc": 23.9
},
"MmvetV2": {
"reject_info": {
"reject_rate": 0.19,
"reject_number": 1,
"total_question": 517
},
"accuracy": 58.9147,
"capability_scores": {
"math": 61.1764705882353,
"ocr": 62.06730769230771,
"spat": 55.025380710659874,
"rec": 56.64233576642339,
"know": 57.179487179487154,
"gen": 59.34306569343065,
"seq": 48.545454545454554
},
"capability_detail_scores": {
"math_ocr": 78.18181818181817,
"spat_math_ocr": 56.666666666666664,
"spat_math_ocr_rec": 40.0,
"spat_rec": 63.57142857142858,
"spat_ocr": 61.92307692307693,
"spat_ocr_rec": 23.333333333333332,
"spat_know_ocr": 75.0,
"ocr_rec": 82.5,
"spat_know_rec": 50.0,
"ocr": 75.62500000000001,
"rec": 57.28813559322033,
"know_rec": 70.76923076923076,
"know_gen_rec": 58.19999999999995,
"know_ocr_gen_rec": 67.6923076923077,
"spat_ocr_gen_rec": 62.7906976744186,
"spat_ocr_gen": 80.0,
"spat_math_ocr_gen_seq": 100.0,
"spat_math_ocr_rec_seq": 0.0,
"spat_gen_rec": 50.90909090909093,
"spat_math_ocr_gen": 40.0,
"spat_seq_rec": 28.57142857142857,
"spat_ocr_rec_seq": 40.0,
"spat_know_gen_rec": 36.66666666666667,
"gen_rec": 64.70588235294117,
"spat_know_ocr_rec": 0.0,
"know_spat_ocr_gen_rec": 75.0,
"math_ocr_rec": 50.0,
"ocr_gen_rec": 61.999999999999986,
"ocr_gen_rec_seq": 72.85714285714285,
"ocr_gen": 60.0,
"seq_gen_rec": 38.46153846153847,
"seq_rec": 60.0,
"spat_seq_gen_rec": 62.499999999999986,
"know_seq_rec": 0.0,
"know_seq_gen_rec": 0.0,
"spat_ocr_gen_rec_seq": 80.0,
"know_ocr_gen_rec_seq": 70.0,
"know_math_rec": 50.0,
"ocr_rec_seq": 0.0
},
"acc_stderr": 0,
"acc": 58.9147
},
"MathVerse": {
"reject_info": {
"reject_rate": 0.03,
"reject_number": 1,
"total_question": 3940
},
"Vision Intensive": {
"accuracy": 42.13,
"correct": 332,
"total": 788
},
"Total": {
"accuracy": 40.7,
"correct": 1603,
"total": 3939
},
"Text Lite": {
"accuracy": 43.15,
"correct": 340,
"total": 788
},
"Vision Dominant": {
"accuracy": 42.39,
"correct": 334,
"total": 788
},
"Text Dominant": {
"accuracy": 47.59,
"correct": 375,
"total": 788
},
"Vision Only": {
"accuracy": 28.21,
"correct": 222,
"total": 787
},
"accuracy": 40.7,
"acc_stderr": 0,
"acc": 40.7
},
"Ocrlite": {
"reject_info": {
"reject_rate": 0.06,
"reject_number": 1,
"total_question": 1645
},
"final_score": [
1128,
1644
],
"accuracy": 68.613,
"Key Information Extraction-Bookshelf": [
20,
52
],
"Scene Text-centric VQA-diet_constraints": [
62,
89
],
"Doc-oriented VQA-Control": [
114,
189
],
"Doc-oriented VQA": [
142,
204
],
"Scene Text-centric VQA-Fake_logo": [
39,
119
],
"Handwritten Mathematical Expression Recognition": [
47,
100
],
"Key Information Extraction": [
162,
209
],
"Scene Text-centric VQA-Control": [
159,
200
],
"Scene Text-centric VQA": [
231,
282
],
"Artistic Text Recognition": [
41,
50
],
"Irregular Text Recognition": [
37,
50
],
"Non-Semantic Text Recognition": [
27,
50
],
"Regular Text Recognition": [
47,
50
],
"acc_stderr": 0,
"acc": 68.613
},
"OcrliteZh": {
"final_score": [
71,
234
],
"accuracy": 30.342,
"Docvqa": [
2,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
2,
10
],
"infographic": [
1,
10
],
"Key Information Extraction": [
25,
45
],
"Scene Text-centric VQA": [
17,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
2,
12
],
"Regular Text Recognition": [
2,
11
],
"Handwriting_CN": [
5,
20
],
"Chinese Unlimited": [
13,
44
],
"acc_stderr": 0,
"acc": 30.342
},
"CharXiv": {
"reject_info": {
"reject_rate": 0.02,
"reject_number": 1,
"total_question": 5000
},
"descriptive": {
"Overall Score": 70.97,
"By Question": {
"Q1": 79.51,
"Q2": 73.48,
"Q3": 56.65,
"Q4": 82.49,
"Q5": 79.08,
"Q6": 65.06,
"Q7": 68.38,
"Q8": 70.98,
"Q9": 67.66,
"Q10": 76.03,
"Q11": 64.0,
"Q12": 70.33,
"Q13": 63.01,
"Q14": 86.17,
"Q15": 89.42,
"Q16": 80.56,
"Q17": 10.71,
"Q18": 82.59,
"Q19": 87.69
},
"By Category": {
"Information Extraction": 72.24,
"Enumeration": 77.14,
"Pattern Recognition": 75.33,
"Counting": 75.32,
"Compositionality": 10.71
},
"By Subplot": {
"1 Subplot": 76.42,
"2-4 Subplots": 71.54,
"5+ Subplots": 61.12
},
"By Subject": {
"Computer Science": 70.44,
"Economics": 72.1,
"Electrical Engineering and Systems Science": 76.05,
"Mathematics": 73.52,
"Physics": 66.47,
"Quantitative Biology": 65.67,
"Quantitative Finance": 71.98,
"Statistics": 71.68
},
"By Year": {
"2020": 72.47,
"2021": 68.3,
"2022": 71.9,
"2023": 71.37
},
"N_valid": 3999,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 33.2,
"By Answer Type": {
"Text-in-Chart": 37.73,
"Text-in-General": 41.41,
"Number-in-Chart": 30.6,
"Number-in-General": 23.58
},
"By Source": {
"GPT-Sourced": 39.67,
"GPT-Inspired": 30.09,
"Completely Human": 32.33
},
"By Subject": {
"Computer Science": 29.37,
"Economics": 34.06,
"Electrical Engineering and Systems Science": 36.97,
"Mathematics": 36.3,
"Physics": 33.86,
"Quantitative Biology": 31.75,
"Quantitative Finance": 31.03,
"Statistics": 31.86
},
"By Year": {
"2020": 35.22,
"2021": 35.63,
"2022": 30.33,
"2023": 31.45
},
"By Subplot": {
"1 Subplot": 33.94,
"2-4 Subplots": 34.39,
"5+ Subplots": 30.08
},
"N_valid": 1000,
"N_invalid": 2,
"Question Type": "Reasoning"
},
"accuracy": 52.09,
"acc_stderr": 0,
"acc": 52.09
},
"MathVision": {
"accuracy": 25.03,
"acc_stderr": 0,
"acc": 25.03
},
"CII-Bench": {
"accuracy": 57.78,
"domain_score": {
"CTC": 53.33,
"Society": 59.46,
"Art": 60.29,
"Env.": 68.52,
"Life": 54.11,
"Politics": 66.67
},
"emotion_score": {
"Positive": 55.13,
"Negative": 61.13,
"Neutral": 56.77
},
"acc_stderr": 0,
"acc": 57.78
},
"Blink": {
"reject_info": {
"reject_rate": 0.05,
"reject_number": 1,
"total_question": 1901
},
"accuracy": 56.95,
"Art Style": 75.0,
"Counting": 73.33,
"Forensic Detection": 43.94,
"Functional Correspondence": 31.54,
"IQ Test": 20.67,
"Jigsaw": 70.0,
"Multi-view Reasoning": 38.35,
"Object Localization": 63.11,
"Relative Depth": 76.61,
"Relative Reflectance": 38.81,
"Semantic Correspondence": 43.17,
"Spatial Relation": 84.62,
"Visual Correspondence": 59.3,
"Visual Similarity": 84.44,
"acc_stderr": 0,
"acc": 56.95
}
}
}
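
The JSON above is the raw evaluation report for LLaVA-Onevision-72B: each key under "results" is one benchmark (CMMMU, MMMU, MMMU_Pro_standard, MMMU_Pro_vision, MmvetV2, MathVerse, Ocrlite, OcrliteZh, CharXiv, MathVision, CII-Bench, Blink), and every benchmark block carries a flat "acc" field next to its per-category breakdown. A minimal Python sketch for reading and summarizing the file is shown below; it assumes a local copy named after the upload path above and exactly the schema shown, and it is not part of the original upload.

import json

# Minimal sketch: load the results file and print the flat "acc" value
# that every benchmark block in this report carries.
# Assumption: the file is saved locally under the name from the path above.
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    report = json.load(f)

print(f"Results for {report['config_general']['model_name']}:")
for benchmark, scores in report["results"].items():
    print(f"  {benchmark:20s} acc = {scores['acc']}")

# Optional sanity check: CMMMU subject accuracies can be re-derived from the
# raw counts, e.g. 66 correct out of 88 gives 75.0 for 艺术与设计.
for subject, detail in report["results"]["CMMMU"].items():
    if isinstance(detail, dict) and {"num", "correct", "accuracy"} <= detail.keys():
        derived = round(detail["correct"] / detail["num"] * 100, 2)
        assert abs(derived - detail["accuracy"]) < 0.01, (subject, derived)

The same pattern extends to the other blocks, e.g. reading "subject_score" under MMMU_Pro_vision or "capability_scores" under MmvetV2, since they are plain nested dictionaries of floats.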