vlm_results / Idefics3-8B-Llama3 / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Idefics3-8B-Llama3",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"accuracy": 33.89,
"acc_stderr": 0,
"acc": 33.89
},
"MMMU": {
"accuracy": 42.22,
"acc_stderr": 0,
"acc": 42.22
},
"MMMU_Pro_standard": {
"accuracy": 27.86,
"acc_stderr": 0,
"acc": 27.86
},
"MMMU_Pro_vision": {
"accuracy": 8.55,
"subject_score": {
"History": 12.5,
"Art": 7.55,
"Design": 6.67,
"Literature": 17.31,
"Agriculture": 3.33,
"Finance": 5.0,
"Sociology": 7.41,
"Accounting": 5.17,
"Energy_and_Power": 8.62,
"Pharmacy": 12.28,
"Architecture_and_Engineering": 13.33,
"Clinical_Medicine": 8.47,
"Public_Health": 1.72,
"Physics": 20.0,
"Art_Theory": 1.82,
"Electronics": 8.33,
"Psychology": 5.0,
"Biology": 10.17,
"Manage": 16.0,
"Economics": 5.08,
"Mechanical_Engineering": 16.95,
"Diagnostics_and_Laboratory_Medicine": 5.0,
"Basic_Medical_Science": 9.62,
"Computer_Science": 6.67,
"Math": 10.0,
"Music": 6.67,
"Materials": 10.0,
"Marketing": 6.78,
"Chemistry": 6.67,
"Geography": 3.85
},
"acc_stderr": 0,
"acc": 8.55
},
"MmvetV2": {
"accuracy": 36.6344,
"capability_scores": {
"math": 16.176470588235293,
"ocr": 38.028846153846146,
"spat": 39.54314720812184,
"rec": 36.62621359223302,
"know": 28.52564102564103,
"gen": 35.27272727272727,
"seq": 30.357142857142865
},
"capability_detail_scores": {
"math_ocr": 0.0,
"math_spat_ocr": 26.666666666666668,
"math_spat_rec_ocr": 25.0,
"spat_rec": 52.85714285714286,
"spat_ocr": 55.38461538461539,
"spat_rec_ocr": 30.83333333333333,
"spat_know_ocr": 75.0,
"rec_ocr": 75.0,
"spat_rec_know": 30.0,
"ocr": 37.5,
"rec": 48.30508474576271,
"rec_know": 7.6923076923076925,
"gen_rec_know": 28.700000000000003,
"gen_know_rec_ocr": 40.76923076923077,
"spat_gen_rec_ocr": 45.58139534883721,
"spat_gen_ocr": 64.99999999999999,
"math_spat_gen_seq_ocr": 50.0,
"math_spat_rec_seq_ocr": 0.0,
"spat_gen_rec": 23.636363636363637,
"math_spat_gen_ocr": 50.0,
"spat_seq_rec": 7.142857142857142,
"spat_seq_rec_ocr": 0.0,
"spat_gen_rec_know": 13.333333333333334,
"gen_rec": 50.882352941176464,
"spat_know_rec_ocr": 25.0,
"gen_rec_know_ocr": 40.76923076923077,
"spat_know_gen_rec_ocr": 50.0,
"math_rec_ocr": 0.0,
"gen_rec_ocr": 27.999999999999996,
"seq_gen_rec_ocr": 42.857142857142854,
"gen_ocr": 21.538461538461537,
"seq_gen_rec": 31.428571428571423,
"seq_rec": 33.33333333333333,
"seq_gen_rec_spat": 51.24999999999999,
"seq_rec_know": 0.0,
"gen_seq_rec": 31.428571428571423,
"seq_gen_rec_know": 20.0,
"spat_gen_rec_seq": 51.24999999999999,
"spat_gen_rec_seq_ocr": 13.333333333333334,
"know_gen_rec_seq_ocr": 35.0,
"math_rec_know": 0.0,
"seq_rec_ocr": 100.0
},
"acc_stderr": 0,
"acc": 36.6344
},
"MathVerse": {
"Text Dominant": {
"accuracy": 24.87,
"correct": 196,
"total": 788
},
"Total": {
"accuracy": 21.32,
"correct": 840,
"total": 3940
},
"Text Lite": {
"accuracy": 21.95,
"correct": 173,
"total": 788
},
"Vision Intensive": {
"accuracy": 21.83,
"correct": 172,
"total": 788
},
"Vision Dominant": {
"accuracy": 22.21,
"correct": 175,
"total": 788
},
"Vision Only": {
"accuracy": 15.74,
"correct": 124,
"total": 788
},
"accuracy": 21.32,
"acc_stderr": 0,
"acc": 21.32
},
"Ocrlite": {
"final_score": [
820,
1645
],
"accuracy": 49.848,
"Key Information Extraction-Bookshelf": [
11,
52
],
"Scene Text-centric VQA-diet_constraints": [
30,
90
],
"Doc-oriented VQA-Control": [
110,
189
],
"Doc-oriented VQA": [
125,
204
],
"Scene Text-centric VQA-Fake_logo": [
29,
119
],
"Handwritten Mathematical Expression Recognition": [
4,
100
],
"Key Information Extraction": [
163,
209
],
"Scene Text-centric VQA-Control": [
141,
200
],
"Scene Text-centric VQA": [
170,
282
],
"Artistic Text Recognition": [
10,
50
],
"Irregular Text Recognition": [
5,
50
],
"Non-Semantic Text Recognition": [
3,
50
],
"Regular Text Recognition": [
19,
50
],
"acc_stderr": 0,
"acc": 49.848
},
"OcrliteZh": {
"final_score": [
30,
234
],
"accuracy": 12.821,
"Docvqa": [
2,
10
],
"Chartqa-human": [
2,
10
],
"Chartqa-au": [
0,
10
],
"infographic": [
1,
10
],
"Key Information Extraction": [
12,
45
],
"Scene Text-centric VQA": [
7,
40
],
"Artistic Text Recognition": [
0,
11
],
"IrRegular Text Recognition": [
0,
11
],
"Non-semantic Text Recognition": [
0,
12
],
"Regular Text Recognition": [
0,
11
],
"Handwriting_CN": [
1,
20
],
"Chinese Unlimited": [
5,
44
],
"acc_stderr": 0,
"acc": 12.821
},
"CharXiv": {
"descriptive": {
"Overall Score": 44.55,
"By Question": {
"Q1": 45.9,
"Q2": 54.78,
"Q3": 38.63,
"Q4": 61.87,
"Q5": 64.85,
"Q6": 47.79,
"Q7": 55.56,
"Q8": 60.27,
"Q9": 46.77,
"Q10": 47.95,
"Q11": 38.29,
"Q12": 45.05,
"Q13": 44.29,
"Q14": 19.86,
"Q15": 23.32,
"Q16": 58.33,
"Q17": 9.38,
"Q18": 53.04,
"Q19": 67.69
},
"By Category": {
"Information Extraction": 52.85,
"Enumeration": 36.72,
"Pattern Recognition": 47.82,
"Counting": 49.87,
"Compositionality": 9.38
},
"By Subplot": {
"1 Subplot": 52.85,
"2-4 Subplots": 40.41,
"5+ Subplots": 37.61
},
"By Subject": {
"Computer Science": 42.66,
"Economics": 46.2,
"Electrical Engineering and Systems Science": 50.42,
"Mathematics": 48.33,
"Physics": 36.81,
"Quantitative Biology": 41.07,
"Quantitative Finance": 46.12,
"Statistics": 44.91
},
"By Year": {
"2020": 44.23,
"2021": 43.97,
"2022": 42.83,
"2023": 47.18
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 24.7,
"By Answer Type": {
"Text-in-Chart": 24.32,
"Text-in-General": 31.31,
"Number-in-Chart": 27.16,
"Number-in-General": 20.09
},
"By Source": {
"GPT-Sourced": 25.54,
"GPT-Inspired": 25.0,
"Completely Human": 24.33
},
"By Subject": {
"Computer Science": 23.02,
"Economics": 25.36,
"Electrical Engineering and Systems Science": 21.01,
"Mathematics": 25.19,
"Physics": 28.35,
"Quantitative Biology": 23.81,
"Quantitative Finance": 22.41,
"Statistics": 28.32
},
"By Year": {
"2020": 25.91,
"2021": 25.67,
"2022": 17.62,
"2023": 29.44
},
"By Subplot": {
"1 Subplot": 23.58,
"2-4 Subplots": 29.37,
"5+ Subplots": 19.07
},
"N_valid": 1000,
"N_invalid": 1,
"Question Type": "Reasoning"
},
"accuracy": 34.62,
"acc_stderr": 0,
"acc": 34.62
},
"MathVision": {
"accuracy": 16.22,
"acc_stderr": 0,
"acc": 16.22
},
"CII-Bench": {
"accuracy": 39.22,
"domain_score": {
"Life": 35.93,
"Art": 41.18,
"CTC": 35.56,
"Society": 39.46,
"Env.": 53.7,
"Politics": 45.83
},
"emotion_score": {
"Neutral": 38.72,
"Negative": 41.89,
"Positive": 36.75
},
"acc_stderr": 0,
"acc": 39.22
},
"Blink": {
"accuracy": 48.13,
"Art Style": 64.96,
"Counting": 61.67,
"Forensic Detection": 28.79,
"Functional Correspondence": 25.38,
"IQ Test": 28.0,
"Jigsaw": 47.33,
"Multi-view Reasoning": 43.61,
"Object Localization": 56.56,
"Relative Depth": 57.26,
"Relative Reflectance": 38.06,
"Semantic Correspondence": 33.81,
"Spatial Relation": 79.72,
"Visual Correspondence": 42.44,
"Visual Similarity": 72.59,
"acc_stderr": 0,
"acc": 48.13
}
}
}
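For reference, a minimal Python sketch for reading a results file like this one and printing the per-benchmark "acc" values; the local file name is an assumption based on the upload path above, so adjust it to wherever your copy lives:

import json

# Assumed to match the uploaded file name; point this at your local copy.
with open("results_2025-01-25T10-42-53.190540.json", "r", encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])
for benchmark, metrics in data["results"].items():
    # Every benchmark block in this file exposes a flat "acc" field.
    print(f"{benchmark}: {metrics['acc']:.2f}")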