vlm_results / Step-1V-32k / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Step-1V-32k",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 47.33,
"acc_stderr": 0,
"acc": 47.33
},
"MMMU": {
"accuracy": 52.44,
"acc_stderr": 0,
"acc": 52.44
},
"MMMU_Pro_standard": {
"accuracy": 35.66,
"subject_score": {
"History": 42.86,
"Art": 64.15,
"Design": 61.67,
"Literature": 71.15,
"Agriculture": 35.0,
"Finance": 31.67,
"Sociology": 42.59,
"Accounting": 44.83,
"Energy_and_Power": 15.52,
"Pharmacy": 43.86,
"Architecture_and_Engineering": 11.67,
"Clinical_Medicine": 20.34,
"Public_Health": 37.93,
"Physics": 23.33,
"Art_Theory": 67.27,
"Electronics": 20.0,
"Psychology": 33.33,
"Biology": 35.59,
"Manage": 28.0,
"Economics": 57.63,
"Mechanical_Engineering": 18.64,
"Diagnostics_and_Laboratory_Medicine": 26.67,
"Basic_Medical_Science": 40.38,
"Computer_Science": 33.33,
"Math": 26.67,
"Music": 18.33,
"Materials": 18.33,
"Marketing": 45.76,
"Chemistry": 28.33,
"Geography": 36.54
},
"difficulty_score": {
"Medium": 33.96,
"Easy": 51.7,
"Hard": 17.96
},
"acc_stderr": 0,
"acc": 35.66
},
"MMMU_Pro_vision": {
"accuracy": 28.96,
"subject_score": {
"History": 33.93,
"Art": 50.94,
"Design": 48.33,
"Literature": 57.69,
"Agriculture": 23.33,
"Finance": 26.67,
"Sociology": 42.59,
"Accounting": 25.86,
"Energy_and_Power": 15.52,
"Pharmacy": 28.07,
"Architecture_and_Engineering": 16.67,
"Clinical_Medicine": 11.86,
"Public_Health": 34.48,
"Physics": 30.0,
"Art_Theory": 45.45,
"Electronics": 20.0,
"Psychology": 31.67,
"Biology": 20.34,
"Manage": 32.0,
"Economics": 35.59,
"Mechanical_Engineering": 13.56,
"Diagnostics_and_Laboratory_Medicine": 20.0,
"Basic_Medical_Science": 36.54,
"Computer_Science": 30.0,
"Math": 25.0,
"Music": 20.0,
"Materials": 13.33,
"Marketing": 25.42,
"Chemistry": 36.67,
"Geography": 26.92
},
"acc_stderr": 0,
"acc": 28.96
},
"MmvetV2": {
"reject_info": {
"reject_rate": 0.39,
"reject_number": 2,
"total_question": 517
},
"accuracy": 64.5825,
"capability_scores": {
"ocr": 69.47115384615388,
"math": 70.58823529411765,
"spat": 61.071428571428555,
"rec": 62.414634146341555,
"know": 62.05128205128205,
"gen": 65.52727272727279,
"seq": 66.85185185185185
},
"capability_detail_scores": {
"ocr_math": 90.9090909090909,
"ocr_spat_math": 65.33333333333334,
"ocr_rec_spat_math": 25.0,
"rec_spat": 61.07142857142858,
"ocr_spat": 67.3076923076923,
"ocr_rec_spat": 40.0,
"ocr_know_spat": 75.0,
"ocr_rec": 50.0,
"rec_know_spat": 41.0,
"ocr": 82.18749999999999,
"rec": 60.84745762711864,
"rec_know": 62.30769230769232,
"rec_know_gen": 63.19999999999997,
"ocr_rec_know_gen": 70.0,
"ocr_rec_spat_gen": 74.88372093023257,
"ocr_spat_gen": 85.00000000000001,
"ocr_seq_spat_math_gen": 100.0,
"ocr_seq_spat_rec_math": 0.0,
"rec_spat_gen": 47.727272727272734,
"ocr_spat_math_gen": 70.0,
"rec_spat_seq": 60.0,
"ocr_rec_spat_seq": 50.0,
"rec_know_spat_gen": 56.666666666666664,
"rec_gen": 66.47058823529413,
"ocr_rec_spat_know": 25.0,
"ocr_spat_know": 75.0,
"ocr_spat_rec_know_gen": 80.0,
"ocr_rec_math": 100.0,
"ocr_rec_gen": 88.00000000000001,
"ocr_rec_gen_seq": 80.0,
"ocr_gen": 51.53846153846155,
"rec_gen_seq": 62.14285714285713,
"rec_seq": 68.0,
"rec_spat_gen_seq": 74.99999999999999,
"rec_know_seq": 90.0,
"rec_know_gen_seq": 65.0,
"ocr_seq_spat_rec_gen": 46.666666666666664,
"ocr_seq_rec_know_gen": 90.0,
"rec_know_math": 50.0,
"ocr_rec_seq": 90.0,
"rec_spat_know": 41.0
},
"acc_stderr": 0,
"acc": 64.5825
},
"MathVerse": {
"Text Dominant": {
"accuracy": 29.95,
"correct": 236,
"total": 788
},
"Total": {
"accuracy": 26.45,
"correct": 1042,
"total": 3940
},
"Text Lite": {
"accuracy": 25.89,
"correct": 204,
"total": 788
},
"Vision Intensive": {
"accuracy": 26.9,
"correct": 212,
"total": 788
},
"Vision Dominant": {
"accuracy": 25.13,
"correct": 198,
"total": 788
},
"Vision Only": {
"accuracy": 24.37,
"correct": 192,
"total": 788
},
"accuracy": 26.45,
"acc_stderr": 0,
"acc": 26.45
},
"Ocrlite": {
"reject_info": {
"reject_rate": 0.24,
"reject_number": 4,
"total_question": 1645
},
"final_score": [
1193,
1641
],
"accuracy": 72.7,
"Key Information Extraction-Bookshelf": [
18,
52
],
"Scene Text-centric VQA-diet_constraints": [
53,
90
],
"Doc-oriented VQA-Control": [
127,
185
],
"Doc-oriented VQA": [
147,
204
],
"Scene Text-centric VQA-Fake_logo": [
73,
119
],
"Handwritten Mathematical Expression Recognition": [
45,
100
],
"Key Information Extraction": [
174,
209
],
"Scene Text-centric VQA-Control": [
168,
200
],
"Scene Text-centric VQA": [
220,
282
],
"Artistic Text Recognition": [
40,
50
],
"Irregular Text Recognition": [
45,
50
],
"Non-Semantic Text Recognition": [
35,
50
],
"Regular Text Recognition": [
48,
50
],
"acc_stderr": 0,
"acc": 72.7
},
"OcrliteZh": {
"reject_info": {
"reject_rate": 0.43,
"reject_number": 1,
"total_question": 234
},
"final_score": [
150,
233
],
"accuracy": 64.378,
"Docvqa": [
8,
10
],
"Chartqa-human": [
3,
10
],
"Chartqa-au": [
5,
10
],
"infographic": [
6,
9
],
"Key Information Extraction": [
38,
45
],
"Scene Text-centric VQA": [
25,
40
],
"Artistic Text Recognition": [
3,
11
],
"IrRegular Text Recognition": [
8,
11
],
"Non-semantic Text Recognition": [
9,
12
],
"Regular Text Recognition": [
11,
11
],
"Handwriting_CN": [
13,
20
],
"Chinese Unlimited": [
21,
44
],
"acc_stderr": 0,
"acc": 64.378
},
"CharXiv": {
"descriptive": {
"Overall Score": 70.7,
"By Question": {
"Q1": 59.84,
"Q2": 76.52,
"Q3": 60.94,
"Q4": 78.6,
"Q5": 78.24,
"Q6": 69.48,
"Q7": 69.66,
"Q8": 87.5,
"Q9": 75.12,
"Q10": 77.4,
"Q11": 44.0,
"Q12": 79.12,
"Q13": 58.45,
"Q14": 81.21,
"Q15": 68.69,
"Q16": 75.0,
"Q17": 44.64,
"Q18": 83.0,
"Q19": 83.08
},
"By Category": {
"Information Extraction": 70.52,
"Enumeration": 74.17,
"Pattern Recognition": 67.47,
"Counting": 79.13,
"Compositionality": 44.64
},
"By Subplot": {
"1 Subplot": 79.53,
"2-4 Subplots": 69.97,
"5+ Subplots": 57.42
},
"By Subject": {
"Computer Science": 70.83,
"Economics": 74.46,
"Electrical Engineering and Systems Science": 76.05,
"Mathematics": 72.59,
"Physics": 65.55,
"Quantitative Biology": 63.69,
"Quantitative Finance": 70.04,
"Statistics": 72.35
},
"By Year": {
"2020": 71.76,
"2021": 67.72,
"2022": 71.72,
"2023": 71.77
},
"N_valid": 4000,
"N_invalid": 5,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 32.0,
"By Answer Type": {
"Text-in-Chart": 36.59,
"Text-in-General": 34.34,
"Number-in-Chart": 29.74,
"Number-in-General": 24.45
},
"By Source": {
"GPT-Sourced": 41.85,
"GPT-Inspired": 29.63,
"Completely Human": 29.83
},
"By Subject": {
"Computer Science": 28.57,
"Economics": 34.06,
"Electrical Engineering and Systems Science": 26.89,
"Mathematics": 32.59,
"Physics": 37.8,
"Quantitative Biology": 30.95,
"Quantitative Finance": 33.62,
"Statistics": 30.97
},
"By Year": {
"2020": 28.34,
"2021": 33.33,
"2022": 29.1,
"2023": 37.1
},
"By Subplot": {
"1 Subplot": 34.46,
"2-4 Subplots": 31.22,
"5+ Subplots": 29.24
},
"N_valid": 1000,
"N_invalid": 3,
"Question Type": "Reasoning"
},
"accuracy": 51.35,
"acc_stderr": 0,
"acc": 51.35
},
"MathVision": {
"accuracy": 25.03,
"acc_stderr": 0,
"acc": 25.03
},
"CII-Bench": {
"accuracy": 58.82,
"domain_score": {
"Life": 61.04,
"Art": 57.35,
"CTC": 51.85,
"Society": 55.68,
"Env.": 72.22,
"Politics": 79.17
},
"emotion_score": {
"Neutral": 60.15,
"Negative": 58.49,
"Positive": 57.69
},
"acc_stderr": 0,
"acc": 58.82
},
"Blink": {
"accuracy": 58.13,
"Art Style": 76.07,
"Counting": 70.0,
"Forensic Detection": 45.45,
"Functional Correspondence": 33.08,
"IQ Test": 24.67,
"Jigsaw": 65.33,
"Multi-view Reasoning": 51.13,
"Object Localization": 57.38,
"Relative Depth": 75.81,
"Relative Reflectance": 36.57,
"Semantic Correspondence": 46.04,
"Spatial Relation": 74.83,
"Visual Correspondence": 75.0,
"Visual Similarity": 83.7,
"acc_stderr": 0,
"acc": 58.13
}
}
}
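
For quick programmatic inspection, the sketch below loads this file and tabulates the per-benchmark scores. It is a minimal example, not part of the upload: the local path is an assumption (point it at wherever the JSON was downloaded), and it relies only on fields visible above, namely "config_general.model_name" and the top-level "acc" value that every benchmark entry carries.

import json  # standard library only

# Assumed local path; adjust to wherever this file was downloaded.
path = "results_2025-01-25T10-42-53.190540.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print("Model:", data["config_general"]["model_name"])  # "Step-1V-32k"

# Each benchmark entry duplicates its headline score as "accuracy" and "acc"
# (with "acc_stderr" fixed at 0), so "acc" is the single field worth reading.
for benchmark, entry in data["results"].items():
    print(f"{benchmark:>20}: {entry['acc']:.2f}")

# Benchmarks with raw counts expose them as [correct, total] pairs; e.g. the
# Ocrlite "final_score" of [1193, 1641] reproduces its 72.7 accuracy.
correct, total = data["results"]["Ocrlite"]["final_score"]
print(f"{'Ocrlite (recomputed)':>20}: {100 * correct / total:.1f}")

Note that the nested breakdowns differ per benchmark (subject_score for the MMMU_Pro splits, capability_scores for MmvetV2, the descriptive/reasoning sections for CharXiv, and so on), so anything beyond the headline "acc" needs benchmark-specific handling.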