vlm_results / Molmo-72B-0924 / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Molmo-72B-0924",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 48.33,
"acc_stderr": 0,
"acc": 48.33
},
"MMMU": {
"accuracy": 51.89,
"acc_stderr": 0,
"acc": 51.89
},
"MMMU_Pro_standard": {
"accuracy": 36.65,
"acc_stderr": 0,
"acc": 36.65
},
"MMMU_Pro_vision": {
"accuracy": 24.28,
"subject_score": {
"History": 30.36,
"Art": 35.85,
"Design": 36.67,
"Literature": 59.62,
"Agriculture": 6.67,
"Finance": 28.33,
"Sociology": 27.78,
"Accounting": 25.86,
"Energy_and_Power": 13.79,
"Pharmacy": 33.33,
"Architecture_and_Engineering": 13.33,
"Clinical_Medicine": 10.17,
"Public_Health": 18.97,
"Physics": 21.67,
"Art_Theory": 32.73,
"Electronics": 16.67,
"Psychology": 28.33,
"Biology": 23.73,
"Manage": 26.0,
"Economics": 40.68,
"Mechanical_Engineering": 28.81,
"Diagnostics_and_Laboratory_Medicine": 16.67,
"Basic_Medical_Science": 30.77,
"Computer_Science": 25.0,
"Math": 16.67,
"Music": 11.67,
"Materials": 11.67,
"Marketing": 20.34,
"Chemistry": 15.0,
"Geography": 30.77
},
"acc_stderr": 0,
"acc": 24.28
},
"MmvetV2": {
"accuracy": 59.2456,
"capability_scores": {
"math": 60.88235294117648,
"ocr": 64.375,
"spat": 59.34010152284262,
"rec": 57.08737864077674,
"know": 50.57692307692307,
"gen": 57.818181818181856,
"seq": 56.42857142857144
},
"capability_detail_scores": {
"math_ocr": 69.0909090909091,
"spat_math_ocr": 66.66666666666666,
"rec_spat_math_ocr": 50.0,
"rec_spat": 56.78571428571429,
"spat_ocr": 74.23076923076923,
"rec_spat_ocr": 50.0,
"know_spat_ocr": 100.0,
"rec_ocr": 72.5,
"rec_know_spat": 23.000000000000004,
"ocr": 64.375,
"rec": 68.13559322033899,
"rec_know": 50.0,
"rec_know_gen": 49.800000000000004,
"rec_know_gen_ocr": 71.53846153846153,
"rec_gen_spat_ocr": 69.76744186046511,
"gen_spat_ocr": 85.00000000000001,
"gen_spat_seq_math_ocr": 0.0,
"spat_rec_seq_math_ocr": 50.0,
"rec_gen_spat": 53.18181818181817,
"gen_spat_math_ocr": 60.0,
"rec_seq_spat": 42.857142857142854,
"rec_seq_spat_ocr": 53.333333333333336,
"rec_know_gen_spat": 56.666666666666664,
"rec_gen": 60.88235294117648,
"rec_know_spat_ocr": 12.5,
"gen_know_spat_rec_ocr": 60.0,
"rec_math_ocr": 0.0,
"rec_gen_ocr": 57.99999999999999,
"rec_seq_gen_ocr": 64.28571428571429,
"gen_ocr": 56.15384615384615,
"rec_seq_gen": 65.00000000000001,
"rec_seq": 73.33333333333334,
"rec_seq_gen_spat": 66.25,
"rec_seq_know": 0.0,
"rec_seq_know_gen": 40.0,
"gen_spat_rec_seq_ocr": 20.0,
"gen_know_rec_seq_ocr": 90.0,
"rec_know_math": 50.0,
"rec_seq_ocr": 0.0
},
"acc_stderr": 0,
"acc": 59.2456
},
"MathVerse": {
"Text Dominant": {
"accuracy": 37.94,
"correct": 299,
"total": 788
},
"Total": {
"accuracy": 28.25,
"correct": 1113,
"total": 3940
},
"Text Lite": {
"accuracy": 29.82,
"correct": 235,
"total": 788
},
"Vision Intensive": {
"accuracy": 27.03,
"correct": 213,
"total": 788
},
"Vision Dominant": {
"accuracy": 26.65,
"correct": 210,
"total": 788
},
"Vision Only": {
"accuracy": 19.8,
"correct": 156,
"total": 788
},
"accuracy": 28.25,
"acc_stderr": 0,
"acc": 28.25
},
"Ocrlite": {
"final_score": [
1121,
1645
],
"accuracy": 68.146,
"Key Information Extraction-Bookshelf": [
32,
52
],
"Scene Text-centric VQA-diet_constraints": [
48,
90
],
"Doc-oriented VQA-Control": [
134,
189
],
"Doc-oriented VQA": [
142,
204
],
"Scene Text-centric VQA-Fake_logo": [
47,
119
],
"Handwritten Mathematical Expression Recognition": [
19,
100
],
"Key Information Extraction": [
170,
209
],
"Scene Text-centric VQA-Control": [
156,
200
],
"Scene Text-centric VQA": [
218,
282
],
"Artistic Text Recognition": [
38,
50
],
"Irregular Text Recognition": [
39,
50
],
"Non-Semantic Text Recognition": [
30,
50
],
"Regular Text Recognition": [
48,
50
],
"acc_stderr": 0,
"acc": 68.146
},
"OcrliteZh": {
"final_score": [
51,
234
],
"accuracy": 21.795,
"Docvqa": [
1,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
1,
10
],
"infographic": [
1,
10
],
"Key Information Extraction": [
19,
45
],
"Scene Text-centric VQA": [
13,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
1,
20
],
"Chinese Unlimited": [
13,
44
],
"acc_stderr": 0,
"acc": 21.795
},
"CharXiv": {
"descriptive": {
"Overall Score": 73.85,
"By Question": {
"Q1": 78.69,
"Q2": 67.83,
"Q3": 54.08,
"Q4": 81.32,
"Q5": 79.08,
"Q6": 77.11,
"Q7": 64.96,
"Q8": 84.38,
"Q9": 77.11,
"Q10": 79.45,
"Q11": 56.57,
"Q12": 79.12,
"Q13": 59.36,
"Q14": 87.94,
"Q15": 88.18,
"Q16": 41.67,
"Q17": 40.62,
"Q18": 89.88,
"Q19": 81.54
},
"By Category": {
"Information Extraction": 72.12,
"Enumeration": 80.55,
"Pattern Recognition": 73.36,
"Counting": 79.64,
"Compositionality": 40.62
},
"By Subplot": {
"1 Subplot": 78.17,
"2-4 Subplots": 72.88,
"5+ Subplots": 68.33
},
"By Subject": {
"Computer Science": 72.42,
"Economics": 75.72,
"Electrical Engineering and Systems Science": 80.46,
"Mathematics": 75.56,
"Physics": 68.11,
"Quantitative Biology": 71.03,
"Quantitative Finance": 71.12,
"Statistics": 76.55
},
"By Year": {
"2020": 74.09,
"2021": 72.03,
"2022": 73.46,
"2023": 75.91
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 34.7,
"By Answer Type": {
"Text-in-Chart": 35.0,
"Text-in-General": 42.42,
"Number-in-Chart": 37.07,
"Number-in-General": 28.38
},
"By Source": {
"GPT-Sourced": 46.74,
"GPT-Inspired": 31.48,
"Completely Human": 32.17
},
"By Subject": {
"Computer Science": 32.54,
"Economics": 36.96,
"Electrical Engineering and Systems Science": 34.45,
"Mathematics": 37.78,
"Physics": 35.43,
"Quantitative Biology": 37.3,
"Quantitative Finance": 25.86,
"Statistics": 36.28
},
"By Year": {
"2020": 38.87,
"2021": 36.4,
"2022": 28.69,
"2023": 34.68
},
"By Subplot": {
"1 Subplot": 37.56,
"2-4 Subplots": 34.13,
"5+ Subplots": 30.93
},
"N_valid": 1000,
"N_invalid": 10,
"Question Type": "Reasoning"
},
"accuracy": 54.27,
"acc_stderr": 0,
"acc": 54.27
},
"MathVision": {
"accuracy": 24.24,
"acc_stderr": 0,
"acc": 24.24
},
"CII-Bench": {
"accuracy": 52.55,
"domain_score": {
"Life": 53.25,
"Art": 57.35,
"CTC": 46.67,
"Society": 48.11,
"Env.": 62.96,
"Politics": 62.5
},
"emotion_score": {
"Neutral": 53.01,
"Negative": 52.45,
"Positive": 52.14
},
"acc_stderr": 0,
"acc": 52.55
},
"Blink": {
"accuracy": 49.03,
"Art Style": 57.26,
"Counting": 68.33,
"Forensic Detection": 40.15,
"Functional Correspondence": 23.85,
"IQ Test": 27.33,
"Jigsaw": 44.67,
"Multi-view Reasoning": 50.38,
"Object Localization": 54.1,
"Relative Depth": 72.58,
"Relative Reflectance": 34.33,
"Semantic Correspondence": 36.69,
"Spatial Relation": 75.52,
"Visual Correspondence": 36.05,
"Visual Similarity": 74.81,
"acc_stderr": 0,
"acc": 49.03
}
}
}
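
The file above is a standard JSON report: "config_general" carries model metadata and "results" maps each benchmark to its scores, with a flat "acc" field alongside any per-category detail. Below is a minimal Python sketch of how such a report can be loaded and sanity-checked; the local path is an assumption taken from the page header, and the [correct, total] reading of "final_score" is inferred from the accuracy fields (e.g. 1121/1645 yields the reported 68.146).

import json

# Assumed local path, taken from the page header above; adjust to your copy.
PATH = "vlm_results/Molmo-72B-0924/results_2025-01-25T10-42-53.190540.json"

with open(PATH) as f:
    report = json.load(f)

print(report["config_general"]["model_name"])  # Molmo-72B-0924

# Every benchmark exposes a flat "acc" field alongside its detail scores.
for bench, scores in sorted(report["results"].items()):
    print(f"{bench:20s} acc = {scores['acc']:.2f}")

# The OCR entries also report final_score as [correct, total]; the
# "accuracy" field is 100 * correct / total (here 1121/1645 -> 68.146).
correct, total = report["results"]["Ocrlite"]["final_score"]
assert round(100 * correct / total, 3) == report["results"]["Ocrlite"]["accuracy"]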