Dataset Preview
The table displays a preview containing only the first few rows of the dataset.
Full Screen Viewer
Full Screen
The full dataset viewer is not available (click to read why); only a preview of the rows is shown.
The dataset generation failed
Error code: DatasetGenerationError Exception: ArrowNotImplementedError Message: Cannot write struct type 'versions' with no child field to Parquet. Consider adding a dummy child field. Traceback: Traceback (most recent call last): File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1870, in _prepare_split_single writer.write_table(table) File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 620, in write_table self._build_writer(inferred_schema=pa_table.schema) File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 441, in _build_writer self.pa_writer = self._WRITER_CLASS(self.stream, schema) File "/src/services/worker/.venv/lib/python3.9/site-packages/pyarrow/parquet/core.py", line 1010, in __init__ self.writer = _parquet.ParquetWriter( File "pyarrow/_parquet.pyx", line 2157, in pyarrow._parquet.ParquetWriter.__cinit__ File "pyarrow/error.pxi", line 154, in pyarrow.lib.pyarrow_internal_check_status File "pyarrow/error.pxi", line 91, in pyarrow.lib.check_status pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'versions' with no child field to Parquet. Consider adding a dummy child field. 
During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1886, in _prepare_split_single num_examples, num_bytes = writer.finalize() File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 639, in finalize self._build_writer(self.schema) File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 441, in _build_writer self.pa_writer = self._WRITER_CLASS(self.stream, schema) File "/src/services/worker/.venv/lib/python3.9/site-packages/pyarrow/parquet/core.py", line 1010, in __init__ self.writer = _parquet.ParquetWriter( File "pyarrow/_parquet.pyx", line 2157, in pyarrow._parquet.ParquetWriter.__cinit__ File "pyarrow/error.pxi", line 154, in pyarrow.lib.pyarrow_internal_check_status File "pyarrow/error.pxi", line 91, in pyarrow.lib.check_status pyarrow.lib.ArrowNotImplementedError: Cannot write struct type 'versions' with no child field to Parquet. Consider adding a dummy child field. 
The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1420, in compute_config_parquet_and_info_response parquet_operations = convert_to_parquet(builder) File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1052, in convert_to_parquet builder.download_and_prepare( File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 924, in download_and_prepare self._download_and_prepare( File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1000, in _download_and_prepare self._prepare_split(split_generator, **prepare_split_kwargs) File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1741, in _prepare_split for job_id, done, content in self._prepare_split_single( File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1897, in _prepare_split_single raise DatasetGenerationError("An error occurred while generating the dataset") from e datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset
Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
config_general
dict | results
dict | versions
dict | config_tasks
dict | summary_tasks
dict | summary_general
dict |
---|---|---|---|---|---|
{
"model_name": "01-ai/Yi-1.5-34B-32K",
"model_dtype": "float16",
"model_size": 0
} | {
"CMMMU": {
"acc": 58.02
},
"CMMU": {
"acc": 60
},
"ChartQA": {
"acc": 67.48
},
"MMMU": {
"acc": 47.66
},
"MMMU_Pro_standard": {
"acc": 68.11
},
"MMMU_Pro_vision": {
"acc": 39.98
},
"OCRBench": {
"acc": 70.96
},
"MathVision": {
"acc": 75.96
},
"CII-Bench": {
"acc": 59.96
},
"Blink": {
"acc": 36.9
}
} | {} | {} | {} | {} |
{
"model_name": "01-ai/Yi-1.5-6B",
"model_dtype": "float16",
"model_size": 0
} | {
"CMMMU": {
"acc": 48.02
},
"CMMU": {
"acc": 66
},
"ChartQA": {
"acc": 63.48
},
"MMMU": {
"acc": 67.66
},
"MMMU_Pro_standard": {
"acc": 48.11
},
"MMMU_Pro_vision": {
"acc": 69.11
},
"OCRBench": {
"acc": 80.96
},
"MathVision": {
"acc": 83.58
},
"CII-Bench": {
"acc": 50.96
},
"Blink": {
"acc": 46.9
}
} | {} | {} | {} | {} |
{
"model_name": "glm-4v",
"model_dtype": "float16",
"model_size": 0
} | {
"ChartQA": {
"acc": 35.08,
"acc_stderr": 0,
"accuracy": 35.08,
"human_test": {
"total": 1250,
"correct": 469,
"accuracy": 37.52
},
"augmented_test": {
"total": 1250,
"correct": 408,
"accuracy": 32.64
}
},
"CMMMU": {
"acc": 34.78,
"acc_stderr": 0,
"商业": {
"num": 126,
"correct": 27,
"accuracy": 21.43
},
"科学": {
"num": 204,
"correct": 54,
"accuracy": 26.47
},
"overall": {
"num": 900,
"correct": 313,
"accuracy": 34.78
},
"accuracy": 34.78,
"健康与医学": {
"num": 153,
"correct": 56,
"accuracy": 36.6
},
"技术与工程": {
"num": 244,
"correct": 87,
"accuracy": 35.66
},
"艺术与设计": {
"num": 88,
"correct": 51,
"accuracy": 57.95
},
"人文社会科学": {
"num": 85,
"correct": 38,
"accuracy": 44.71
}
},
"CMMU": {
"acc": 10.58,
"acc_stderr": 0,
"val": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 6,
"accuracy": 4
},
"normal": {
"total": 1205,
"correct": 160,
"accuracy": 13.28
}
},
"fill-in-the-blank": {
"hard": {
"total": 300,
"correct": 20,
"accuracy": 6.67
},
"normal": {
"total": 507,
"correct": 48,
"accuracy": 9.47
}
},
"multiple-response": {
"hard": {
"total": 94,
"correct": 0,
"accuracy": 0
},
"normal": {
"total": 33,
"correct": 0,
"accuracy": 0
}
}
},
"test": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 4,
"accuracy": 2.67
},
"normal": {
"total": 1205,
"correct": 157,
"accuracy": 13.03
}
},
"fill-in-the-blank": {
"hard": {
"total": 296,
"correct": 25,
"accuracy": 8.45
},
"normal": {
"total": 529,
"correct": 57,
"accuracy": 10.78
}
},
"multiple-response": {
"hard": {
"total": 95,
"correct": 1,
"accuracy": 1.05
},
"normal": {
"total": 32,
"correct": 0,
"accuracy": 0
}
}
},
"val-overall": {
"total": 2289,
"correct": 234,
"accuracy": 10.22,
"bias_rate": 128.08
},
"test-overall": {
"total": 2307,
"correct": 244,
"accuracy": 10.58,
"bias_rate": 130.25
}
},
"MMMU": {
"acc": 40.56,
"acc_stderr": 0,
"accuracy": 40.56,
"subject_score": {
"Art": 61.67,
"Math": 36.67,
"Basic": 50,
"Music": 40,
"Design": 66.67,
"Energy": 46.67,
"Manage": 30,
"Public": 43.33,
"Biology": 40,
"Finance": 20,
"History": 63.33,
"Physics": 6.67,
"Clinical": 46.67,
"Computer": 36.67,
"Pharmacy": 36.67,
"Chemistry": 23.33,
"Economics": 30,
"Geography": 30,
"Marketing": 40,
"Materials": 26.67,
"Sociology": 50,
"Accounting": 43.33,
"Literature": 83.33,
"Mechanical": 30,
"Psychology": 46.67,
"Agriculture": 33.33,
"Diagnostics": 40,
"Electronics": 20,
"Architecture": 33.33
},
"difficulty_score": {
"Easy": 49.15,
"Hard": 26.52,
"Medium": 40.57
}
},
"MMMU_Pro_standard": {
"acc": 20.58,
"acc_stderr": 0,
"accuracy": 20.58,
"subject_score": {
"Art": 24.53,
"Math": 21.67,
"Music": 23.33,
"Design": 31.67,
"Manage": 24,
"Biology": 27.12,
"Finance": 16.67,
"History": 21.43,
"Physics": 3.33,
"Pharmacy": 26.32,
"Chemistry": 13.33,
"Economics": 20.34,
"Geography": 25,
"Marketing": 16.95,
"Materials": 15,
"Sociology": 25.93,
"Accounting": 13.79,
"Art_Theory": 38.18,
"Literature": 48.08,
"Psychology": 18.33,
"Agriculture": 13.33,
"Electronics": 15,
"Public_Health": 12.07,
"Computer_Science": 23.33,
"Energy_and_Power": 15.52,
"Clinical_Medicine": 28.81,
"Basic_Medical_Science": 21.15,
"Mechanical_Engineering": 22.03,
"Architecture_and_Engineering": 6.67,
"Diagnostics_and_Laboratory_Medicine": 11.67
},
"difficulty_score": {
"Easy": 27.84,
"Hard": 12.47,
"Medium": 19.85
}
},
"MMMU_Pro_vision": {
"acc": 16.71,
"acc_stderr": 0,
"accuracy": 16.71,
"subject_score": {
"Art": 28.3,
"Math": 20,
"Music": 25,
"Design": 20,
"Manage": 22,
"Biology": 15.25,
"Finance": 8.33,
"History": 14.29,
"Physics": 15,
"Pharmacy": 17.54,
"Chemistry": 11.67,
"Economics": 18.64,
"Geography": 23.08,
"Marketing": 10.17,
"Materials": 11.67,
"Sociology": 25.93,
"Accounting": 13.79,
"Art_Theory": 29.09,
"Literature": 44.23,
"Psychology": 11.67,
"Agriculture": 8.33,
"Electronics": 8.33,
"Public_Health": 13.79,
"Computer_Science": 18.33,
"Energy_and_Power": 15.52,
"Clinical_Medicine": 8.47,
"Basic_Medical_Science": 15.38,
"Mechanical_Engineering": 15.25,
"Architecture_and_Engineering": 11.67,
"Diagnostics_and_Laboratory_Medicine": 8.33
}
},
"OCRBench": {
"acc": 79.1,
"acc_stderr": 0,
"accuracy": 79.1,
"final_score": [
791,
1000
],
"Doc-oriented VQA": [
150,
200
],
"Scene Text-centric VQA": [
178,
200
],
"Handwriting Recognition": [
34,
50
],
"Digit String Recognition": [
32,
50
],
"Regular Text Recognition": [
49,
50
],
"Artistic Text Recognition": [
47,
50
],
"Irregular Text Recognition": [
47,
50
],
"Key Information Extraction": [
166,
200
],
"Non-Semantic Text Recognition": [
43,
50
],
"Handwritten Mathematical Expression Recognition": [
45,
100
]
},
"MathVision": {
"acc": 16.02,
"acc_stderr": 0,
"accuracy": 16.02
},
"CII-Bench": {
"acc": 53.99,
"acc_stderr": 0,
"accuracy": 53.99,
"domain_score": {
"Art": 49.26,
"CTC": 49.63,
"Env.": 68.52,
"Life": 50.65,
"Society": 61.08,
"Politics": 50
},
"emotion_score": {
"Neutral": 54.14,
"Negative": 55.47,
"Positive": 52.14
}
},
"Blink": {
"acc": 44.66,
"acc_stderr": 0,
"Jigsaw": 50,
"IQ Test": 32,
"Counting": 60.83,
"accuracy": 44.66,
"Art Style": 46.15,
"Relative Depth": 62.9,
"Spatial Relation": 74.13,
"Visual Similarity": 54.81,
"Forensic Detection": 21.21,
"Object Localization": 63.11,
"Multi-view Reasoning": 60.9,
"Relative Reflectance": 41.79,
"Visual Correspondence": 25.58,
"Semantic Correspondence": 23.02,
"Functional Correspondence": 17.69
}
} | {} | {} | {} | {} |
{
"model_name": "yi-vision",
"model_dtype": "float16",
"model_size": 0
} | {
"ChartQA": {
"acc": 78.88,
"acc_stderr": 0,
"accuracy": 78.88,
"human_test": {
"total": 1250,
"correct": 811,
"accuracy": 64.88
},
"augmented_test": {
"total": 1250,
"correct": 1161,
"accuracy": 92.88
}
},
"CMMMU": {
"acc": 41.22,
"acc_stderr": 0,
"商业": {
"num": 126,
"correct": 30,
"accuracy": 23.81
},
"科学": {
"num": 204,
"correct": 78,
"accuracy": 38.24
},
"overall": {
"num": 900,
"correct": 371,
"accuracy": 41.22
},
"accuracy": 41.22,
"健康与医学": {
"num": 153,
"correct": 77,
"accuracy": 50.33
},
"技术与工程": {
"num": 244,
"correct": 89,
"accuracy": 36.48
},
"艺术与设计": {
"num": 88,
"correct": 56,
"accuracy": 63.64
},
"人文社会科学": {
"num": 85,
"correct": 41,
"accuracy": 48.24
}
},
"CMMU": {
"acc": 22.97,
"acc_stderr": 0,
"val": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 30,
"accuracy": 20
},
"normal": {
"total": 1205,
"correct": 361,
"accuracy": 29.96
}
},
"fill-in-the-blank": {
"hard": {
"total": 300,
"correct": 44,
"accuracy": 14.67
},
"normal": {
"total": 507,
"correct": 77,
"accuracy": 15.19
}
},
"multiple-response": {
"hard": {
"total": 94,
"correct": 9,
"accuracy": 9.57
},
"normal": {
"total": 33,
"correct": 6,
"accuracy": 18.18
}
}
},
"test": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 25,
"accuracy": 16.67
},
"normal": {
"total": 1205,
"correct": 380,
"accuracy": 31.54
}
},
"fill-in-the-blank": {
"hard": {
"total": 296,
"correct": 37,
"accuracy": 12.5
},
"normal": {
"total": 529,
"correct": 76,
"accuracy": 14.37
}
},
"multiple-response": {
"hard": {
"total": 95,
"correct": 8,
"accuracy": 8.42
},
"normal": {
"total": 32,
"correct": 4,
"accuracy": 12.5
}
}
},
"val-overall": {
"total": 2289,
"correct": 527,
"accuracy": 23.02,
"bias_rate": 7.13
},
"test-overall": {
"total": 2307,
"correct": 530,
"accuracy": 22.97,
"bias_rate": 5.58
}
},
"MMMU": {
"acc": 46.33,
"acc_stderr": 0,
"accuracy": 46.33,
"subject_score": {
"Art": 71.67,
"Math": 46.67,
"Basic": 53.33,
"Music": 30,
"Design": 80,
"Energy": 30,
"Manage": 40,
"Public": 46.67,
"Biology": 46.67,
"Finance": 36.67,
"History": 63.33,
"Physics": 40,
"Clinical": 53.33,
"Computer": 40,
"Pharmacy": 30,
"Chemistry": 23.33,
"Economics": 60,
"Geography": 60,
"Marketing": 53.33,
"Materials": 20,
"Sociology": 56.67,
"Accounting": 36.67,
"Literature": 83.33,
"Mechanical": 23.33,
"Psychology": 46.67,
"Agriculture": 53.33,
"Diagnostics": 43.33,
"Electronics": 23.33,
"Architecture": 26.67
},
"difficulty_score": {
"Easy": 54.92,
"Hard": 30.39,
"Medium": 47.17
}
},
"MMMU_Pro_standard": {
"acc": 29.84,
"acc_stderr": 0,
"accuracy": 29.84,
"reject_info": {
"reject_rate": 0.06,
"reject_number": 1,
"total_question": 1730
},
"subject_score": {
"Art": 47.17,
"Math": 21.67,
"Music": 20,
"Design": 47.46,
"Manage": 24,
"Biology": 32.2,
"Finance": 25,
"History": 42.86,
"Physics": 23.33,
"Pharmacy": 31.58,
"Chemistry": 21.67,
"Economics": 23.73,
"Geography": 36.54,
"Marketing": 33.9,
"Materials": 13.33,
"Sociology": 48.15,
"Accounting": 24.14,
"Art_Theory": 60,
"Literature": 73.08,
"Psychology": 28.33,
"Agriculture": 25,
"Electronics": 21.67,
"Public_Health": 24.14,
"Computer_Science": 30,
"Energy_and_Power": 12.07,
"Clinical_Medicine": 27.12,
"Basic_Medical_Science": 34.62,
"Mechanical_Engineering": 20.34,
"Architecture_and_Engineering": 10,
"Diagnostics_and_Laboratory_Medicine": 25
},
"difficulty_score": {
"Easy": 43.75,
"Hard": 17.96,
"Medium": 26.62
}
},
"MMMU_Pro_vision": {
"acc": 53.06
},
"OCRBench": {
"acc": 69.369,
"acc_stderr": 0,
"accuracy": 69.369,
"final_score": [
693,
999
],
"reject_info": {
"reject_rate": 0.1,
"reject_number": 1,
"total_question": 1000
},
"Doc-oriented VQA": [
146,
200
],
"Scene Text-centric VQA": [
174,
200
],
"Handwriting Recognition": [
28,
50
],
"Digit String Recognition": [
21,
50
],
"Regular Text Recognition": [
48,
50
],
"Artistic Text Recognition": [
42,
50
],
"Irregular Text Recognition": [
40,
50
],
"Key Information Extraction": [
154,
200
],
"Non-Semantic Text Recognition": [
38,
50
],
"Handwritten Mathematical Expression Recognition": [
2,
99
]
},
"MathVision": {
"acc": 14.44,
"acc_stderr": 0,
"accuracy": 14.44
},
"CII-Bench": {
"acc": 45.23,
"acc_stderr": 0,
"accuracy": 45.23,
"domain_score": {
"Art": 49.26,
"CTC": 48.15,
"Env.": 53.7,
"Life": 37.23,
"Society": 45.95,
"Politics": 58.33
},
"emotion_score": {
"Neutral": 48.5,
"Negative": 42.64,
"Positive": 44.44
}
},
"Blink": {
"acc": 50.2
}
} | {} | {} | {} | {} |
README.md exists but content is empty.
- Downloads last month
- 132