vlm_results/Phi-3.5-Vision-Instruct/results_2025-01-25T10-42-53.190540.json
{
  "config_general": {
    "model_name": "Phi-3.5-Vision-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
"results": {
"CMMMU": {
"accuracy": 28.44,
"acc_stderr": 0,
"acc": 28.44
},
"MMMU": {
"accuracy": 44.0,
"acc_stderr": 0,
"acc": 44.0
},
"MMMU_Pro_standard": {
"accuracy": 24.28,
"acc_stderr": 0,
"acc": 24.28
},
"MMMU_Pro_vision": {
"accuracy": 12.43,
"subject_score": {
"History": 7.14,
"Art": 16.98,
"Design": 13.33,
"Literature": 26.92,
"Agriculture": 5.0,
"Finance": 8.33,
"Sociology": 12.96,
"Accounting": 10.34,
"Energy_and_Power": 6.9,
"Pharmacy": 19.3,
"Architecture_and_Engineering": 8.33,
"Clinical_Medicine": 3.39,
"Public_Health": 10.34,
"Physics": 11.67,
"Art_Theory": 1.82,
"Electronics": 11.67,
"Psychology": 13.33,
"Biology": 15.25,
"Manage": 22.0,
"Economics": 10.17,
"Mechanical_Engineering": 15.25,
"Diagnostics_and_Laboratory_Medicine": 10.0,
"Basic_Medical_Science": 26.92,
"Computer_Science": 13.33,
"Math": 10.0,
"Music": 20.0,
"Materials": 8.33,
"Marketing": 10.17,
"Chemistry": 18.33,
"Geography": 9.62
},
"acc_stderr": 0,
"acc": 12.43
},
"MmvetV2": {
"accuracy": 42.5725,
"capability_scores": {
"math": 25.0,
"ocr": 46.58653846153845,
"spat": 41.87817258883246,
"rec": 41.09223300970876,
"know": 35.06410256410257,
"gen": 40.36363636363635,
"seq": 44.28571428571427
},
"capability_detail_scores": {
"math_ocr": 18.181818181818183,
"math_spat_ocr": 16.666666666666664,
"rec_math_spat_ocr": 50.0,
"rec_spat": 46.785714285714285,
"spat_ocr": 63.07692307692307,
"rec_spat_ocr": 16.666666666666664,
"know_spat_ocr": 62.5,
"rec_ocr": 60.0,
"rec_know_spat": 40.0,
"ocr": 59.06250000000001,
"rec": 53.38983050847458,
"rec_know": 24.615384615384613,
"rec_gen_know": 32.30000000000001,
"rec_gen_know_ocr": 55.38461538461538,
"rec_gen_spat_ocr": 49.534883720930246,
"gen_spat_ocr": 85.00000000000001,
"seq_spat_math_gen_ocr": 20.0,
"rec_seq_spat_math_ocr": 0.0,
"rec_gen_spat": 26.818181818181813,
"math_gen_spat_ocr": 80.0,
"rec_seq_spat": 42.857142857142854,
"rec_seq_spat_ocr": 36.66666666666667,
"rec_gen_know_spat": 10.0,
"rec_gen": 44.99999999999999,
"rec_know_spat_ocr": 20.0,
"rec_know_spat_gen_ocr": 50.0,
"rec_know_gen": 32.30000000000001,
"rec_math_ocr": 100.0,
"rec_gen_ocr": 27.999999999999996,
"rec_seq_gen_ocr": 65.71428571428572,
"gen_ocr": 44.61538461538462,
"rec_gen_seq": 42.14285714285714,
"rec_seq": 45.0,
"rec_gen_spat_seq": 50.0,
"rec_know_seq": 0.0,
"rec_gen_know_seq": 50.0,
"rec_seq_gen_spat": 50.0,
"rec_seq_spat_gen_ocr": 30.0,
"rec_seq_know_gen_ocr": 70.0,
"rec_math_know": 50.0,
"rec_seq_ocr": 0.0
},
"acc_stderr": 0,
"acc": 42.5725
},
"MathVerse": {
"Text Dominant": {
"accuracy": 25.76,
"correct": 203,
"total": 788
},
"Total": {
"accuracy": 22.99,
"correct": 906,
"total": 3940
},
"Text Lite": {
"accuracy": 23.6,
"correct": 186,
"total": 788
},
"Vision Intensive": {
"accuracy": 23.48,
"correct": 185,
"total": 788
},
"Vision Dominant": {
"accuracy": 23.98,
"correct": 189,
"total": 788
},
"Vision Only": {
"accuracy": 18.15,
"correct": 143,
"total": 788
},
"accuracy": 22.99,
"acc_stderr": 0,
"acc": 22.99
},
"Ocrlite": {
"final_score": [
901,
1645
],
"accuracy": 54.772,
"Key Information Extraction-Bookshelf": [
8,
52
],
"Scene Text-centric VQA-diet_constraints": [
37,
90
],
"Doc-oriented VQA-Control": [
99,
189
],
"Doc-oriented VQA": [
103,
204
],
"Scene Text-centric VQA-Fake_logo": [
55,
119
],
"Handwritten Mathematical Expression Recognition": [
8,
100
],
"Key Information Extraction": [
135,
209
],
"Scene Text-centric VQA-Control": [
155,
200
],
"Scene Text-centric VQA": [
145,
282
],
"Artistic Text Recognition": [
37,
50
],
"Irregular Text Recognition": [
37,
50
],
"Non-Semantic Text Recognition": [
35,
50
],
"Regular Text Recognition": [
47,
50
],
"acc_stderr": 0,
"acc": 54.772
},
"OcrliteZh": {
"final_score": [
30,
234
],
"accuracy": 12.821,
"Docvqa": [
1,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
0,
10
],
"infographic": [
0,
10
],
"Key Information Extraction": [
15,
45
],
"Scene Text-centric VQA": [
5,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
3,
20
],
"Chinese Unlimited": [
4,
44
],
"acc_stderr": 0,
"acc": 12.821
},
"CharXiv": {
"descriptive": {
"Overall Score": 53.1,
"By Question": {
"Q1": 60.25,
"Q2": 63.48,
"Q3": 56.65,
"Q4": 70.04,
"Q5": 70.29,
"Q6": 58.63,
"Q7": 59.83,
"Q8": 66.96,
"Q9": 47.76,
"Q10": 54.11,
"Q11": 51.43,
"Q12": 45.05,
"Q13": 54.34,
"Q14": 29.43,
"Q15": 38.66,
"Q16": 61.11,
"Q17": 8.04,
"Q18": 61.54,
"Q19": 81.54
},
"By Category": {
"Information Extraction": 62.81,
"Enumeration": 45.92,
"Pattern Recognition": 57.64,
"Counting": 54.45,
"Compositionality": 8.04
},
"By Subplot": {
"1 Subplot": 63.54,
"2-4 Subplots": 50.33,
"5+ Subplots": 40.47
},
"By Subject": {
"Computer Science": 52.38,
"Economics": 54.53,
"Electrical Engineering and Systems Science": 57.77,
"Mathematics": 55.56,
"Physics": 50.98,
"Quantitative Biology": 47.42,
"Quantitative Finance": 53.88,
"Statistics": 52.21
},
"By Year": {
"2020": 55.97,
"2021": 50.67,
"2022": 53.18,
"2023": 52.72
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 23.2,
"By Answer Type": {
"Text-in-Chart": 23.64,
"Text-in-General": 30.3,
"Number-in-Chart": 26.72,
"Number-in-General": 15.72
},
"By Source": {
"GPT-Sourced": 27.72,
"GPT-Inspired": 19.44,
"Completely Human": 23.17
},
"By Subject": {
"Computer Science": 19.05,
"Economics": 21.74,
"Electrical Engineering and Systems Science": 20.17,
"Mathematics": 23.7,
"Physics": 31.5,
"Quantitative Biology": 21.43,
"Quantitative Finance": 18.97,
"Statistics": 29.2
},
"By Year": {
"2020": 22.27,
"2021": 24.9,
"2022": 21.72,
"2023": 23.79
},
"By Subplot": {
"1 Subplot": 26.17,
"2-4 Subplots": 23.81,
"5+ Subplots": 17.37
},
"N_valid": 1000,
"N_invalid": 3,
"Question Type": "Reasoning"
},
"accuracy": 38.15,
"acc_stderr": 0,
"acc": 38.15
},
"MathVision": {
"accuracy": 14.64,
"acc_stderr": 0,
"acc": 14.64
},
"CII-Bench": {
"accuracy": 36.08,
"domain_score": {
"Life": 32.03,
"Art": 39.71,
"CTC": 36.3,
"Society": 33.51,
"Env.": 46.3,
"Politics": 50.0
},
"emotion_score": {
"Neutral": 39.47,
"Negative": 36.23,
"Positive": 32.05
},
"acc_stderr": 0,
"acc": 36.08
},
"Blink": {
"accuracy": 57.39,
"Art Style": 88.89,
"Counting": 53.33,
"Forensic Detection": 92.42,
"Functional Correspondence": 33.08,
"IQ Test": 26.0,
"Jigsaw": 71.33,
"Multi-view Reasoning": 45.86,
"Object Localization": 45.9,
"Relative Depth": 70.16,
"Relative Reflectance": 38.06,
"Semantic Correspondence": 39.57,
"Spatial Relation": 65.03,
"Visual Correspondence": 55.23,
"Visual Similarity": 84.44,
"acc_stderr": 0,
"acc": 57.39
}
}
}
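
For reference, a minimal Python sketch of how a results file with this layout could be read, assuming the JSON above is saved locally under the filename from the path at the top of this page; the helper name summarize_results and the command-line usage are illustrative, not part of the dataset.

import json

def summarize_results(path):
    """Load an eval-results JSON file and return (model_name, {benchmark: acc}).

    Assumes the layout shown above: a top-level "config_general" object and a
    "results" object whose per-benchmark entries each carry an "acc" field
    (falling back to "accuracy" if "acc" is absent).
    """
    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    model = data.get("config_general", {}).get("model_name", "unknown")
    scores = {}
    for benchmark, fields in data.get("results", {}).items():
        acc = fields.get("acc", fields.get("accuracy"))
        if acc is not None:
            scores[benchmark] = acc
    return model, scores

if __name__ == "__main__":
    # Illustrative local filename; adjust to wherever the file is stored.
    model, scores = summarize_results("results_2025-01-25T10-42-53.190540.json")
    print(model)
    # Print benchmarks from highest to lowest reported accuracy.
    for name, acc in sorted(scores.items(), key=lambda kv: -kv[1]):
        print(f"{name:20s} {acc:6.2f}")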