{
"config_general": {
"model_name": "Janus-1.3B",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 25.44,
"acc_stderr": 0,
"acc": 25.44
},
"MMMU": {
"accuracy": 30.0,
"acc_stderr": 0,
"acc": 30.0
},
"MMMU_Pro_standard": {
"accuracy": 15.09,
"acc_stderr": 0,
"acc": 15.09
},
"MMMU_Pro_vision": {
"accuracy": 11.97,
"subject_score": {
"History": 0.0,
"Art": 13.21,
"Design": 3.33,
"Literature": 9.62,
"Agriculture": 13.33,
"Finance": 11.67,
"Sociology": 12.96,
"Accounting": 20.69,
"Energy_and_Power": 13.79,
"Pharmacy": 17.54,
"Architecture_and_Engineering": 10.0,
"Clinical_Medicine": 10.17,
"Public_Health": 3.45,
"Physics": 16.67,
"Art_Theory": 5.45,
"Electronics": 5.0,
"Psychology": 10.0,
"Biology": 15.25,
"Manage": 18.0,
"Economics": 15.25,
"Mechanical_Engineering": 18.64,
"Diagnostics_and_Laboratory_Medicine": 6.67,
"Basic_Medical_Science": 9.62,
"Computer_Science": 10.0,
"Math": 15.0,
"Music": 16.67,
"Materials": 10.0,
"Marketing": 13.56,
"Chemistry": 20.0,
"Geography": 13.46
},
"acc_stderr": 0,
"acc": 11.97
},
"MmvetV2": {
"accuracy": 27.911,
"capability_scores": {
"ocr": 25.81730769230769,
"math": 2.941176470588235,
"spat": 26.14213197969544,
"rec": 28.54368932038836,
"know": 27.243589743589748,
"gen": 26.472727272727287,
"seq": 16.071428571428573
},
"capability_detail_scores": {
"ocr_math": 0.0,
"ocr_math_spat": 4.0,
"ocr_math_spat_rec": 0.0,
"spat_rec": 41.07142857142857,
"ocr_spat": 18.84615384615385,
"ocr_spat_rec": 16.666666666666664,
"ocr_know_spat": 75.0,
"ocr_rec": 32.5,
"spat_know_rec": 40.0,
"ocr": 48.4375,
"rec": 36.271186440677965,
"know_rec": 8.461538461538462,
"gen_know_rec": 26.800000000000008,
"gen_ocr_know_rec": 21.53846153846154,
"gen_ocr_spat_rec": 31.16279069767442,
"gen_ocr_spat": 30.0,
"seq_ocr_gen_math_spat": 0.0,
"seq_ocr_math_spat_rec": 0.0,
"gen_spat_rec": 25.90909090909091,
"gen_ocr_math_spat": 20.0,
"seq_spat_rec": 10.0,
"seq_ocr_spat_rec": 33.33333333333333,
"spat_gen_know_rec": 23.333333333333332,
"gen_rec": 40.58823529411764,
"ocr_know_spat_rec": 50.0,
"gen_ocr_spat_know_rec": 15.0,
"ocr_math_rec": 0.0,
"gen_ocr_rec": 32.0,
"gen_ocr_seq_rec": 24.28571428571429,
"gen_ocr": 14.615384615384613,
"seq_gen_rec": 12.857142857142856,
"seq_rec": 21.666666666666668,
"seq_gen_spat_rec": 7.500000000000001,
"seq_know_rec": 100.0,
"seq_gen_know_rec": 0.0,
"seq_gen_ocr_spat_rec": 10.0,
"seq_gen_ocr_know_rec": 30.000000000000004,
"math_know_rec": 10.0,
"seq_ocr_rec": 0.0,
"know_spat_rec": 40.0
},
"acc_stderr": 0,
"acc": 27.911
},
"MathVerse": {
"Text Dominant": {
"accuracy": 16.62,
"correct": 131,
"total": 788
},
"Total": {
"accuracy": 15.63,
"correct": 616,
"total": 3940
},
"Text Lite": {
"accuracy": 17.26,
"correct": 136,
"total": 788
},
"Vision Intensive": {
"accuracy": 17.64,
"correct": 139,
"total": 788
},
"Vision Dominant": {
"accuracy": 13.83,
"correct": 109,
"total": 788
},
"Vision Only": {
"accuracy": 12.82,
"correct": 101,
"total": 788
},
"accuracy": 15.63,
"acc_stderr": 0,
"acc": 15.63
},
"Ocrlite": {
"final_score": [
556,
1645
],
"accuracy": 33.799,
"Key Information Extraction-Bookshelf": [
3,
52
],
"Scene Text-centric VQA-diet_constraints": [
0,
90
],
"Doc-oriented VQA-Control": [
32,
189
],
"Doc-oriented VQA": [
32,
204
],
"Scene Text-centric VQA-Fake_logo": [
23,
119
],
"Handwritten Mathematical Expression Recognition": [
39,
100
],
"Key Information Extraction": [
63,
209
],
"Scene Text-centric VQA-Control": [
123,
200
],
"Scene Text-centric VQA": [
112,
282
],
"Artistic Text Recognition": [
35,
50
],
"Irregular Text Recognition": [
32,
50
],
"Non-Semantic Text Recognition": [
19,
50
],
"Regular Text Recognition": [
43,
50
],
"acc_stderr": 0,
"acc": 33.799
},
"OcrliteZh": {
"final_score": [
19,
234
],
"accuracy": 8.12,
"Docvqa": [
0,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
0,
10
],
"infographic": [
0,
10
],
"Key Information Extraction": [
8,
45
],
"Scene Text-centric VQA": [
2,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
4,
20
],
"Chinese Unlimited": [
3,
44
],
"acc_stderr": 0,
"acc": 8.12
},
"CharXiv": {
"descriptive": {
"Overall Score": 20.55,
"By Question": {
"Q1": 20.49,
"Q2": 42.17,
"Q3": 20.6,
"Q4": 30.35,
"Q5": 10.46,
"Q6": 23.69,
"Q7": 18.8,
"Q8": 16.52,
"Q9": 18.91,
"Q10": 33.56,
"Q11": 32.0,
"Q12": 22.53,
"Q13": 15.07,
"Q14": 9.57,
"Q15": 7.03,
"Q16": 19.44,
"Q17": 3.12,
"Q18": 24.7,
"Q19": 66.15
},
"By Category": {
"Information Extraction": 23.78,
"Enumeration": 12.67,
"Pattern Recognition": 27.07,
"Counting": 33.84,
"Compositionality": 3.12
},
"By Subplot": {
"1 Subplot": 26.42,
"2-4 Subplots": 17.92,
"5+ Subplots": 15.15
},
"By Subject": {
"Computer Science": 19.25,
"Economics": 19.38,
"Electrical Engineering and Systems Science": 25.0,
"Mathematics": 21.3,
"Physics": 20.87,
"Quantitative Biology": 19.05,
"Quantitative Finance": 19.61,
"Statistics": 20.13
},
"By Year": {
"2020": 21.56,
"2021": 19.16,
"2022": 20.8,
"2023": 20.77
},
"N_valid": 4000,
"N_invalid": 40,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 14.1,
"By Answer Type": {
"Text-in-Chart": 10.45,
"Text-in-General": 20.2,
"Number-in-Chart": 14.22,
"Number-in-General": 18.34
},
"By Source": {
"GPT-Sourced": 16.3,
"GPT-Inspired": 9.26,
"Completely Human": 15.17
},
"By Subject": {
"Computer Science": 13.49,
"Economics": 15.22,
"Electrical Engineering and Systems Science": 10.08,
"Mathematics": 17.78,
"Physics": 12.6,
"Quantitative Biology": 16.67,
"Quantitative Finance": 8.62,
"Statistics": 17.7
},
"By Year": {
"2020": 13.36,
"2021": 14.94,
"2022": 13.93,
"2023": 14.11
},
"By Subplot": {
"1 Subplot": 16.84,
"2-4 Subplots": 11.9,
"5+ Subplots": 13.14
},
"N_valid": 1000,
"N_invalid": 2,
"Question Type": "Reasoning"
},
"accuracy": 17.32,
"acc_stderr": 0,
"acc": 17.32
},
"MathVision": {
"accuracy": 14.24,
"acc_stderr": 0,
"acc": 14.24
},
"CII-Bench": {
"accuracy": 21.96,
"domain_score": {
"Life": 16.88,
"Art": 27.21,
"CTC": 21.48,
"Society": 23.78,
"Env.": 31.48,
"Politics": 8.33
},
"emotion_score": {
"Neutral": 24.44,
"Negative": 21.51,
"Positive": 19.66
},
"acc_stderr": 0,
"acc": 21.96
},
"Blink": {
"accuracy": 38.56,
"Art Style": 52.99,
"Counting": 42.5,
"Forensic Detection": 22.73,
"Functional Correspondence": 24.62,
"IQ Test": 23.33,
"Jigsaw": 52.67,
"Multi-view Reasoning": 44.36,
"Object Localization": 41.8,
"Relative Depth": 58.87,
"Relative Reflectance": 28.36,
"Semantic Correspondence": 27.34,
"Spatial Relation": 46.15,
"Visual Correspondence": 27.33,
"Visual Similarity": 53.33,
"acc_stderr": 0,
"acc": 38.56
}
}
}