{
"config_general": {
"model_name": "Molmo-7B-D",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 39.78,
"acc_stderr": 0,
"acc": 39.78
},
"MMMU": {
"accuracy": 43.0,
"acc_stderr": 0,
"acc": 43.0
},
"MMMU_Pro_standard": {
"accuracy": 26.07,
"acc_stderr": 0,
"acc": 26.07
},
"MMMU_Pro_vision": {
"accuracy": 16.07,
"subject_score": {
"History": 26.79,
"Art": 20.75,
"Design": 23.33,
"Literature": 48.08,
"Agriculture": 10.0,
"Finance": 5.0,
"Sociology": 20.37,
"Accounting": 12.07,
"Energy_and_Power": 10.34,
"Pharmacy": 26.32,
"Architecture_and_Engineering": 15.0,
"Clinical_Medicine": 11.86,
"Public_Health": 6.9,
"Physics": 11.67,
"Art_Theory": 16.36,
"Electronics": 15.0,
"Psychology": 10.0,
"Biology": 15.25,
"Manage": 20.0,
"Economics": 8.47,
"Mechanical_Engineering": 16.95,
"Diagnostics_and_Laboratory_Medicine": 18.33,
"Basic_Medical_Science": 17.31,
"Computer_Science": 15.0,
"Math": 18.33,
"Music": 13.33,
"Materials": 11.67,
"Marketing": 5.08,
"Chemistry": 18.33,
"Geography": 21.15
},
"acc_stderr": 0,
"acc": 16.07
},
"MmvetV2": {
"accuracy": 52.6886,
"capability_scores": {
"math": 49.117647058823536,
"ocr": 57.59615384615382,
"spat": 50.10152284263958,
"rec": 50.77669902912625,
"know": 46.15384615384613,
"gen": 48.65454545454543,
"seq": 53.75000000000001
},
"capability_detail_scores": {
"math_ocr": 66.36363636363636,
"spat_math_ocr": 43.333333333333336,
"rec_spat_math_ocr": 75.0,
"rec_spat": 47.50000000000001,
"spat_ocr": 64.99999999999999,
"rec_spat_ocr": 51.66666666666667,
"know_spat_ocr": 87.5,
"rec_ocr": 52.5,
"rec_know_spat": 30.0,
"ocr": 62.18749999999999,
"rec": 70.0,
"rec_know": 58.46153846153847,
"rec_know_gen": 43.500000000000014,
"rec_know_gen_ocr": 68.46153846153847,
"rec_gen_spat_ocr": 57.90697674418607,
"gen_spat_ocr": 80.0,
"gen_spat_seq_math_ocr": 50.0,
"spat_rec_seq_math_ocr": 40.0,
"rec_gen_spat": 29.545454545454547,
"gen_spat_math_ocr": 50.0,
"rec_seq_spat": 38.57142857142858,
"rec_seq_spat_ocr": 63.33333333333333,
"rec_know_gen_spat": 43.333333333333336,
"rec_gen": 46.1764705882353,
"rec_know_spat_ocr": 5.0,
"gen_know_spat_rec_ocr": 65.0,
"rec_math_ocr": 0.0,
"rec_gen_ocr": 46.0,
"rec_seq_gen_ocr": 55.71428571428571,
"gen_ocr": 50.0,
"rec_seq_gen": 54.99999999999999,
"rec_seq": 71.66666666666667,
"rec_seq_gen_spat": 58.75,
"rec_seq_know": 0.0,
"rec_seq_know_gen": 50.0,
"gen_spat_rec_seq_ocr": 43.333333333333336,
"gen_know_rec_seq_ocr": 85.00000000000001,
"rec_know_math": 0.0,
"rec_seq_ocr": 0.0
},
"acc_stderr": 0,
"acc": 52.6886
},
"MathVerse": {
"Text Dominant": {
"accuracy": 31.98,
"correct": 252,
"total": 788
},
"Total": {
"accuracy": 24.47,
"correct": 964,
"total": 3940
},
"Text Lite": {
"accuracy": 25.63,
"correct": 202,
"total": 788
},
"Vision Intensive": {
"accuracy": 26.14,
"correct": 206,
"total": 788
},
"Vision Dominant": {
"accuracy": 20.56,
"correct": 162,
"total": 788
},
"Vision Only": {
"accuracy": 18.02,
"correct": 142,
"total": 788
},
"accuracy": 24.47,
"acc_stderr": 0,
"acc": 24.47
},
"Ocrlite": {
"final_score": [
1102,
1645
],
"accuracy": 66.991,
"Key Information Extraction-Bookshelf": [
23,
52
],
"Scene Text-centric VQA-diet_constraints": [
42,
90
],
"Doc-oriented VQA-Control": [
129,
189
],
"Doc-oriented VQA": [
138,
204
],
"Scene Text-centric VQA-Fake_logo": [
46,
119
],
"Handwritten Mathematical Expression Recognition": [
22,
100
],
"Key Information Extraction": [
173,
209
],
"Scene Text-centric VQA-Control": [
161,
200
],
"Scene Text-centric VQA": [
213,
282
],
"Artistic Text Recognition": [
40,
50
],
"Irregular Text Recognition": [
40,
50
],
"Non-Semantic Text Recognition": [
28,
50
],
"Regular Text Recognition": [
47,
50
],
"acc_stderr": 0,
"acc": 66.991
},
"OcrliteZh": {
"final_score": [
40,
234
],
"accuracy": 17.094,
"Docvqa": [
1,
10
],
"Chartqa-human": [
1,
10
],
"Chartqa-au": [
0,
10
],
"infographic": [
3,
10
],
"Key Information Extraction": [
14,
45
],
"Scene Text-centric VQA": [
9,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
4,
20
],
"Chinese Unlimited": [
8,
44
],
"acc_stderr": 0,
"acc": 17.094
},
"CharXiv": {
"descriptive": {
"Overall Score": 48.62,
"By Question": {
"Q1": 52.87,
"Q2": 55.65,
"Q3": 41.63,
"Q4": 73.54,
"Q5": 71.55,
"Q6": 64.26,
"Q7": 58.55,
"Q8": 66.96,
"Q9": 62.69,
"Q10": 52.05,
"Q11": 34.29,
"Q12": 64.29,
"Q13": 30.14,
"Q14": 14.54,
"Q15": 16.61,
"Q16": 47.22,
"Q17": 10.71,
"Q18": 62.75,
"Q19": 76.92
},
"By Category": {
"Information Extraction": 59.96,
"Enumeration": 35.11,
"Pattern Recognition": 50.66,
"Counting": 61.83,
"Compositionality": 10.71
},
"By Subplot": {
"1 Subplot": 55.18,
"2-4 Subplots": 46.76,
"5+ Subplots": 40.89
},
"By Subject": {
"Computer Science": 46.03,
"Economics": 49.82,
"Electrical Engineering and Systems Science": 56.09,
"Mathematics": 50.74,
"Physics": 48.23,
"Quantitative Biology": 44.44,
"Quantitative Finance": 46.34,
"Statistics": 47.12
},
"By Year": {
"2020": 50.2,
"2021": 46.65,
"2022": 48.87,
"2023": 48.89
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 23.5,
"By Answer Type": {
"Text-in-Chart": 26.14,
"Text-in-General": 30.3,
"Number-in-Chart": 18.97,
"Number-in-General": 20.09
},
"By Source": {
"GPT-Sourced": 29.35,
"GPT-Inspired": 25.0,
"Completely Human": 21.17
},
"By Subject": {
"Computer Science": 26.98,
"Economics": 26.81,
"Electrical Engineering and Systems Science": 19.33,
"Mathematics": 24.44,
"Physics": 20.47,
"Quantitative Biology": 27.78,
"Quantitative Finance": 16.38,
"Statistics": 24.78
},
"By Year": {
"2020": 21.05,
"2021": 23.37,
"2022": 25.82,
"2023": 23.79
},
"By Subplot": {
"1 Subplot": 24.87,
"2-4 Subplots": 24.6,
"5+ Subplots": 19.49
},
"N_valid": 1000,
"N_invalid": 7,
"Question Type": "Reasoning"
},
"accuracy": 36.06,
"acc_stderr": 0,
"acc": 36.06
},
"MathVision": {
"accuracy": 17.43,
"acc_stderr": 0,
"acc": 17.43
},
"CII-Bench": {
"accuracy": 40.13,
"domain_score": {
"Life": 33.77,
"Art": 43.38,
"CTC": 35.56,
"Society": 42.7,
"Env.": 57.41,
"Politics": 50.0
},
"emotion_score": {
"Neutral": 40.23,
"Negative": 42.64,
"Positive": 37.18
},
"acc_stderr": 0,
"acc": 40.13
},
"Blink": {
"accuracy": 43.56,
"Art Style": 58.12,
"Counting": 57.5,
"Forensic Detection": 29.55,
"Functional Correspondence": 30.0,
"IQ Test": 22.67,
"Jigsaw": 52.67,
"Multi-view Reasoning": 45.86,
"Object Localization": 56.56,
"Relative Depth": 70.16,
"Relative Reflectance": 26.87,
"Semantic Correspondence": 23.02,
"Spatial Relation": 65.03,
"Visual Correspondence": 28.49,
"Visual Similarity": 54.07,
"acc_stderr": 0,
"acc": 43.56
}
}
}