vlm_results / Llama-3.2-11B-Vision-Instruct / results_2025-01-25T10-42-53.190540.json
{
    "config_general": {
        "model_name": "Llama-3.2-11B-Vision-Instruct",
        "model_dtype": "float16",
        "model_size": 0
    },
    "results": {
        "CMMMU": {
            "accuracy": 28.89,
            "acc_stderr": 0,
            "acc": 28.89
        },
        "MMMU": {
            "accuracy": 38.33,
            "acc_stderr": 0,
            "acc": 38.33
        },
        "MMMU_Pro_standard": {
            "accuracy": 26.53,
            "acc_stderr": 0,
            "acc": 26.53
        },
        "MMMU_Pro_vision": {
            "accuracy": 12.6,
            "subject_score": {
                "History": 8.93,
                "Art": 1.89,
                "Design": 3.33,
                "Literature": 1.92,
                "Agriculture": 18.33,
                "Finance": 10.0,
                "Sociology": 9.26,
                "Accounting": 17.24,
                "Energy_and_Power": 12.07,
                "Pharmacy": 33.33,
                "Architecture_and_Engineering": 11.67,
                "Clinical_Medicine": 3.39,
                "Public_Health": 12.07,
                "Physics": 16.67,
                "Art_Theory": 7.27,
                "Electronics": 11.67,
                "Psychology": 13.33,
                "Biology": 11.86,
                "Manage": 22.0,
                "Economics": 10.17,
                "Mechanical_Engineering": 16.95,
                "Diagnostics_and_Laboratory_Medicine": 13.33,
                "Basic_Medical_Science": 9.62,
                "Computer_Science": 16.67,
                "Math": 6.67,
                "Music": 13.33,
                "Materials": 11.67,
                "Marketing": 10.17,
                "Chemistry": 25.0,
                "Geography": 17.31
            },
            "acc_stderr": 0,
            "acc": 12.6
        },
        "MmvetV2": {
            "reject_info": {
                "reject_rate": 54.35,
                "reject_number": 281,
                "total_question": 517
            },
            "accuracy": 41.9068,
            "capability_scores": {
                "ocr": 54.27184466019417,
                "math": 44.00000000000001,
                "spat": 56.62921348314607,
                "rec": 34.47852760736196,
                "know": 29.047619047619044,
                "gen": 31.79775280898876,
                "seq": 25.0
            },
            "capability_detail_scores": {
                "ocr_math": 36.36363636363637,
                "ocr_math_spat": 56.42857142857143,
                "ocr_math_rec_spat": 25.0,
                "rec_spat": 54.666666666666664,
                "ocr_spat": 77.6923076923077,
                "ocr_rec_spat": 28.57142857142857,
                "ocr_spat_know": 100.0,
                "ocr_rec": 45.0,
                "rec_spat_know": 0.0,
                "ocr": 36.0,
                "rec": 41.842105263157904,
                "rec_know": 18.88888888888889,
                "gen_rec_know": 25.967741935483872,
                "ocr_gen_rec_know": 45.0,
                "ocr_gen_rec_spat": 66.25,
                "ocr_gen_spat": 70.0,
                "math_gen_spat_ocr_seq": 50.0,
                "math_spat_rec_ocr_seq": 0.0,
                "gen_rec_spat": 15.714285714285717,
                "ocr_math_gen_spat": 30.0,
                "seq_rec_spat": 0,
                "ocr_seq_rec_spat": 0,
                "gen_rec_spat_know": 0,
                "gen_rec": 0,
                "ocr_rec_spat_know": 0,
                "gen_spat_rec_ocr_know": 0,
                "ocr_math_rec": 0,
                "ocr_gen_rec": 0,
                "ocr_seq_gen_rec": 0,
                "ocr_gen": 0,
                "seq_gen_rec": 0,
                "seq_rec": 0,
                "seq_gen_rec_spat": 0,
                "seq_rec_know": 0,
                "seq_gen_rec_know": 0,
                "gen_spat_rec_ocr_seq": 0,
                "gen_rec_ocr_seq_know": 0,
                "math_rec_know": 0,
                "ocr_seq_rec": 0
            },
            "acc_stderr": 0,
            "acc": 41.9068
        },
        "MathVerse": {
            "Text Dominant": {
                "accuracy": 38.58,
                "correct": 304,
                "total": 788
            },
            "Total": {
                "accuracy": 27.08,
                "correct": 1067,
                "total": 3940
            },
            "Text Lite": {
                "accuracy": 26.78,
                "correct": 211,
                "total": 788
            },
            "Vision Intensive": {
                "accuracy": 26.9,
                "correct": 212,
                "total": 788
            },
            "Vision Dominant": {
                "accuracy": 22.97,
                "correct": 181,
                "total": 788
            },
            "Vision Only": {
                "accuracy": 20.18,
                "correct": 159,
                "total": 788
            },
            "accuracy": 27.08,
            "acc_stderr": 0,
            "acc": 27.08
        },
        "Ocrlite": {
            "final_score": [
                433,
                1645
            ],
            "accuracy": 26.322,
            "Key Information Extraction-Bookshelf": [
                15,
                52
            ],
            "Scene Text-centric VQA-diet_constraints": [
                3,
                90
            ],
            "Doc-oriented VQA-Control": [
                95,
                189
            ],
            "Doc-oriented VQA": [
                71,
                204
            ],
            "Scene Text-centric VQA-Fake_logo": [
                14,
                119
            ],
            "Handwritten Mathematical Expression Recognition": [
                1,
                100
            ],
            "Key Information Extraction": [
                57,
                209
            ],
            "Scene Text-centric VQA-Control": [
                61,
                200
            ],
            "Scene Text-centric VQA": [
                24,
                282
            ],
            "Artistic Text Recognition": [
                29,
                50
            ],
            "Irregular Text Recognition": [
                33,
                50
            ],
            "Non-Semantic Text Recognition": [
                4,
                50
            ],
            "Regular Text Recognition": [
                26,
                50
            ],
            "acc_stderr": 0,
            "acc": 26.322
        },
        "OcrliteZh": {
            "final_score": [
                3,
                234
            ],
            "accuracy": 1.282,
            "Docvqa": [
                0,
                10
            ],
            "Chartqa-human": [
                0,
                10
            ],
            "Chartqa-au": [
                0,
                10
            ],
            "infographic": [
                0,
                10
            ],
            "Key Information Extraction": [
                0,
                45
            ],
            "Scene Text-centric VQA": [
                0,
                40
            ],
            "Artistic Text Recognition": [
                0,
                11
            ],
            "IrRegular Text Recognition": [
                0,
                11
            ],
            "Non-semantic Text Recognition": [
                0,
                12
            ],
            "Regular Text Recognition": [
                0,
                11
            ],
            "Handwriting_CN": [
                1,
                20
            ],
            "Chinese Unlimited": [
                2,
                44
            ],
            "acc_stderr": 0,
            "acc": 1.282
        },
        "CharXiv": {
            "descriptive": {
                "Overall Score": 52.58,
                "By Question": {
                    "Q1": 38.93,
                    "Q2": 75.22,
                    "Q3": 64.81,
                    "Q4": 78.99,
                    "Q5": 75.73,
                    "Q6": 67.07,
                    "Q7": 65.81,
                    "Q8": 53.57,
                    "Q9": 59.7,
                    "Q10": 65.07,
                    "Q11": 31.43,
                    "Q12": 65.38,
                    "Q13": 51.6,
                    "Q14": 22.34,
                    "Q15": 16.29,
                    "Q16": 55.56,
                    "Q17": 11.16,
                    "Q18": 60.32,
                    "Q19": 75.38
                },
                "By Category": {
                    "Information Extraction": 66.67,
                    "Enumeration": 37.69,
                    "Pattern Recognition": 48.91,
                    "Counting": 66.92,
                    "Compositionality": 11.16
                },
                "By Subplot": {
                    "1 Subplot": 59.65,
                    "2-4 Subplots": 50.99,
                    "5+ Subplots": 43.54
                },
                "By Subject": {
                    "Computer Science": 50.0,
                    "Economics": 53.8,
                    "Electrical Engineering and Systems Science": 56.09,
                    "Mathematics": 53.7,
                    "Physics": 47.83,
                    "Quantitative Biology": 48.81,
                    "Quantitative Finance": 55.39,
                    "Statistics": 55.53
                },
                "By Year": {
                    "2020": 53.04,
                    "2021": 51.82,
                    "2022": 53.59,
                    "2023": 51.92
                },
                "N_valid": 4000,
                "N_invalid": 0,
                "Question Type": "Descriptive"
            },
            "reasoning": {
                "Overall Score": 27.7,
                "By Answer Type": {
                    "Text-in-Chart": 30.68,
                    "Text-in-General": 33.33,
                    "Number-in-Chart": 26.72,
                    "Number-in-General": 20.52
                },
                "By Source": {
                    "GPT-Sourced": 33.7,
                    "GPT-Inspired": 26.39,
                    "Completely Human": 26.33
                },
                "By Subject": {
                    "Computer Science": 25.4,
                    "Economics": 26.81,
                    "Electrical Engineering and Systems Science": 27.73,
                    "Mathematics": 35.56,
                    "Physics": 32.28,
                    "Quantitative Biology": 23.02,
                    "Quantitative Finance": 24.14,
                    "Statistics": 25.66
                },
                "By Year": {
                    "2020": 25.1,
                    "2021": 30.27,
                    "2022": 29.1,
                    "2023": 26.21
                },
                "By Subplot": {
                    "1 Subplot": 25.39,
                    "2-4 Subplots": 31.22,
                    "5+ Subplots": 25.85
                },
                "N_valid": 1000,
                "N_invalid": 18,
                "Question Type": "Reasoning"
            },
            "accuracy": 40.14,
            "acc_stderr": 0,
            "acc": 40.14
        },
        "MathVision": {
            "accuracy": 15.72,
            "acc_stderr": 0,
            "acc": 15.72
        },
        "CII-Bench": {
            "accuracy": 1.44,
            "domain_score": {
                "Life": 1.3,
                "Art": 0.74,
                "CTC": 1.48,
                "Society": 0.54,
                "Env.": 7.41,
                "Politics": 0.0
            },
            "emotion_score": {
                "Neutral": 1.88,
                "Negative": 1.13,
                "Positive": 1.28
            },
            "acc_stderr": 0,
            "acc": 1.44
        },
        "Blink": {
            "accuracy": 28.2,
            "Art Style": 46.15,
            "Counting": 44.17,
            "Forensic Detection": 15.91,
            "Functional Correspondence": 17.69,
            "IQ Test": 4.0,
            "Jigsaw": 24.0,
            "Multi-view Reasoning": 44.36,
            "Object Localization": 59.84,
            "Relative Depth": 37.9,
            "Relative Reflectance": 17.16,
            "Semantic Correspondence": 8.63,
            "Spatial Relation": 37.06,
            "Visual Correspondence": 13.95,
            "Visual Similarity": 38.52,
            "acc_stderr": 0,
            "acc": 28.2
        }
    }
}
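
For reference, a minimal Python sketch (not part of the uploaded file) for reading a report in this format: every benchmark entry under "results" carries a top-level "acc" field next to its benchmark-specific breakdowns, so the headline scores can be listed generically. It assumes the file has been saved locally under the name shown above; adjust the path if it lives elsewhere (e.g., after downloading it from the dataset repository).

import json

# Assumed local filename; change this if the file is stored under another path.
path = "results_2025-01-25T10-42-53.190540.json"

with open(path, encoding="utf-8") as f:
    report = json.load(f)

config = report["config_general"]
print(f"model: {config['model_name']} (dtype: {config['model_dtype']})")

# Every benchmark entry in this report exposes a top-level "acc" field
# alongside its detail scores (subject_score, capability_scores, "By Question", ...).
for benchmark, scores in report["results"].items():
    print(f"{benchmark:<20} acc = {scores['acc']}")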