vlm_results/LLaVA-OneVision-7B/results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "LLaVA-OneVision-7B",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 37.11,
"acc_stderr": 0,
"acc": 37.11
},
"MMMU": {
"accuracy": 45.33,
"acc_stderr": 0,
"acc": 45.33
},
"MMMU_Pro_standard": {
"accuracy": 28.67,
"acc_stderr": 0,
"acc": 28.67
},
"MMMU_Pro_vision": {
"accuracy": 11.39,
"subject_score": {
"Literature": 17.31,
"Sociology": 9.26,
"History": 10.71,
"Finance": 15.0,
"Art": 11.32,
"Design": 8.33,
"Agriculture": 8.33,
"Clinical_Medicine": 13.56,
"Accounting": 8.62,
"Energy_and_Power": 13.79,
"Architecture_and_Engineering": 5.0,
"Electronics": 6.67,
"Art_Theory": 9.09,
"Physics": 13.33,
"Public_Health": 5.17,
"Manage": 22.0,
"Biology": 13.56,
"Psychology": 16.67,
"Economics": 16.95,
"Diagnostics_and_Laboratory_Medicine": 8.33,
"Mechanical_Engineering": 5.08,
"Basic_Medical_Science": 7.69,
"Computer_Science": 11.67,
"Math": 8.33,
"Pharmacy": 15.79,
"Music": 16.67,
"Materials": 5.0,
"Marketing": 15.25,
"Chemistry": 8.33,
"Geography": 17.31
},
"acc_stderr": 0,
"acc": 11.39
},
"MmvetV2": {
"reject_info": {
"reject_rate": 0.19,
"reject_number": 1,
"total_question": 517
},
"accuracy": 44.9806,
"capability_scores": {
"math": 42.94117647058823,
"ocr": 43.07692307692308,
"spat": 39.441624365482234,
"rec": 44.014598540146,
"know": 42.43589743589742,
"gen": 42.51824817518247,
"seq": 30.545454545454554
},
"capability_detail_scores": {
"math_ocr": 66.36363636363636,
"spat_math_ocr": 36.0,
"rec_spat_math_ocr": 0.0,
"rec_spat": 51.42857142857142,
"spat_ocr": 43.84615384615384,
"rec_spat_ocr": 12.5,
"know_spat_ocr": 87.5,
"rec_ocr": 67.5,
"rec_know_spat": 30.0,
"ocr": 56.56250000000001,
"rec": 60.84745762711864,
"rec_know": 34.61538461538461,
"rec_know_gen": 43.299999999999976,
"rec_know_gen_ocr": 54.61538461538461,
"rec_gen_spat_ocr": 45.813953488372086,
"gen_spat_ocr": 55.00000000000001,
"gen_spat_seq_math_ocr": 0.0,
"spat_rec_seq_math_ocr": 50.0,
"rec_gen_spat": 33.18181818181819,
"gen_spat_math_ocr": 40.0,
"rec_seq_spat": 40.0,
"rec_seq_spat_ocr": 13.333333333333334,
"rec_know_gen_spat": 23.333333333333332,
"rec_gen": 54.70588235294118,
"rec_know_spat_ocr": 17.5,
"gen_know_spat_rec_ocr": 50.0,
"rec_math_ocr": 0.0,
"rec_gen_ocr": 54.0,
"rec_seq_gen_ocr": 18.571428571428573,
"gen_ocr": 30.76923076923077,
"rec_seq_gen": 30.76923076923077,
"rec_seq": 41.66666666666667,
"rec_seq_gen_spat": 43.75,
"rec_seq_know": 0.0,
"rec_seq_know_gen": 50.0,
"gen_spat_rec_seq_ocr": 13.333333333333334,
"gen_know_rec_seq_ocr": 20.0,
"rec_know_math": 50.0,
"rec_seq_ocr": 0.0
},
"acc_stderr": 0,
"acc": 44.9806
},
"MathVerse": {
"Vision Intensive": {
"accuracy": 30.46,
"correct": 240,
"total": 788
},
"Total": {
"accuracy": 27.84,
"correct": 1097,
"total": 3940
},
"Text Lite": {
"accuracy": 31.98,
"correct": 252,
"total": 788
},
"Vision Dominant": {
"accuracy": 25.51,
"correct": 201,
"total": 788
},
"Vision Only": {
"accuracy": 13.96,
"correct": 110,
"total": 788
},
"Text Dominant": {
"accuracy": 37.31,
"correct": 294,
"total": 788
},
"accuracy": 27.84,
"acc_stderr": 0,
"acc": 27.84
},
"Ocrlite": {
"reject_info": {
"reject_rate": 0.06,
"reject_number": 1,
"total_question": 1645
},
"final_score": [
1042,
1644
],
"accuracy": 63.382,
"Key Information Extraction-Bookshelf": [
13,
52
],
"Scene Text-centric VQA-diet_constraints": [
39,
89
],
"Doc-oriented VQA-Control": [
124,
189
],
"Doc-oriented VQA": [
118,
204
],
"Scene Text-centric VQA-Fake_logo": [
43,
119
],
"Handwritten Mathematical Expression Recognition": [
19,
100
],
"Key Information Extraction": [
166,
209
],
"Scene Text-centric VQA-Control": [
162,
200
],
"Scene Text-centric VQA": [
218,
282
],
"Artistic Text Recognition": [
36,
50
],
"Irregular Text Recognition": [
38,
50
],
"Non-Semantic Text Recognition": [
20,
50
],
"Regular Text Recognition": [
46,
50
],
"acc_stderr": 0,
"acc": 63.382
},
"OcrliteZh": {
"final_score": [
42,
234
],
"accuracy": 17.949,
"Docvqa": [
1,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
1,
10
],
"infographic": [
3,
10
],
"Key Information Extraction": [
11,
45
],
"Scene Text-centric VQA": [
10,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
1,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
2,
20
],
"Chinese Unlimited": [
11,
44
],
"acc_stderr": 0,
"acc": 17.949
},
"CharXiv": {
"reject_info": {
"reject_rate": 0.02,
"reject_number": 1,
"total_question": 5000
},
"descriptive": {
"Overall Score": 39.08,
"By Question": {
"Q1": 34.43,
"Q2": 35.65,
"Q3": 24.03,
"Q4": 57.2,
"Q5": 63.6,
"Q6": 41.37,
"Q7": 50.85,
"Q8": 54.46,
"Q9": 50.25,
"Q10": 30.14,
"Q11": 42.29,
"Q12": 29.67,
"Q13": 27.06,
"Q14": 27.3,
"Q15": 21.41,
"Q16": 63.89,
"Q17": 7.59,
"Q18": 53.85,
"Q19": 75.38
},
"By Category": {
"Information Extraction": 44.07,
"Enumeration": 34.41,
"Pattern Recognition": 50.22,
"Counting": 37.4,
"Compositionality": 7.59
},
"By Subplot": {
"1 Subplot": 46.05,
"2-4 Subplots": 37.37,
"5+ Subplots": 30.43
},
"By Subject": {
"Computer Science": 38.69,
"Economics": 39.86,
"Electrical Engineering and Systems Science": 43.07,
"Mathematics": 44.44,
"Physics": 36.61,
"Quantitative Biology": 34.19,
"Quantitative Finance": 37.93,
"Statistics": 37.39
},
"By Year": {
"2020": 41.34,
"2021": 38.31,
"2022": 38.42,
"2023": 38.31
},
"N_valid": 3999,
"N_invalid": 50,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 20.0,
"By Answer Type": {
"Text-in-Chart": 18.18,
"Text-in-General": 34.34,
"Number-in-Chart": 24.57,
"Number-in-General": 12.66
},
"By Source": {
"GPT-Sourced": 17.39,
"GPT-Inspired": 19.44,
"Completely Human": 21.0
},
"By Subject": {
"Computer Science": 16.67,
"Economics": 19.57,
"Electrical Engineering and Systems Science": 21.85,
"Mathematics": 22.22,
"Physics": 23.62,
"Quantitative Biology": 18.25,
"Quantitative Finance": 14.66,
"Statistics": 23.01
},
"By Year": {
"2020": 20.65,
"2021": 25.67,
"2022": 12.3,
"2023": 20.97
},
"By Subplot": {
"1 Subplot": 20.47,
"2-4 Subplots": 19.84,
"5+ Subplots": 19.49
},
"N_valid": 1000,
"N_invalid": 1,
"Question Type": "Reasoning"
},
"accuracy": 29.54,
"acc_stderr": 0,
"acc": 29.54
},
"MathVision": {
"accuracy": 16.68,
"acc_stderr": 0,
"acc": 16.68
},
"CII-Bench": {
"accuracy": 42.88,
"domain_score": {
"CTC": 42.22,
"Society": 49.19,
"Art": 43.38,
"Life": 34.2,
"Env.": 53.7,
"Politics": 54.17
},
"emotion_score": {
"Positive": 37.18,
"Negative": 46.04,
"Neutral": 44.74
},
"acc_stderr": 0,
"acc": 42.88
},
"Blink": {
"accuracy": 49.03,
"Art Style": 58.12,
"Counting": 65.83,
"Forensic Detection": 25.0,
"Functional Correspondence": 33.08,
"IQ Test": 26.67,
"Jigsaw": 51.33,
"Multi-view Reasoning": 50.38,
"Object Localization": 56.56,
"Relative Depth": 75.81,
"Relative Reflectance": 23.88,
"Semantic Correspondence": 32.37,
"Spatial Relation": 79.72,
"Visual Correspondence": 38.37,
"Visual Similarity": 77.78,
"acc_stderr": 0,
"acc": 49.03
}
}
}
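
A minimal Python sketch for reading this results file, assuming it has been downloaded locally under the filename shown above. Every benchmark entry under "results" carries a top-level "accuracy" field alongside its per-category breakdowns, so a summary can be printed directly:

    import json

    # Assumed local path; adjust to wherever the file was downloaded.
    path = "results_2025-01-25T10-42-53.190540.json"

    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    # Model metadata from the "config_general" block.
    print(data["config_general"]["model_name"])

    # One accuracy line per benchmark (CMMMU, MMMU, MmvetV2, CharXiv, ...).
    for benchmark, scores in data["results"].items():
        print(f"{benchmark}: {scores['accuracy']}")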