Columns:
- dataset: string, length 4-115
- config: string, length 1-121
- split: string, length 1-228
- num_examples: int64, 3-341M
- statistics_string_text: list, length 1-16.4k
- partial: bool, 2 classes
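
The nested layout of a row is easier to read as code. Below is a minimal sketch of one row and one per-column statistics entry, assuming statistics_string_text holds JSON objects shaped like the ones in the rows that follow; the class names are illustrative, not part of any library.

```python
from dataclasses import dataclass
from typing import List


@dataclass
class Histogram:
    bin_edges: List[int]  # len(hist) + 1 edges; the last edge is the observed maximum
    hist: List[int]       # examples per bin; with nan_count == 0 these sum to num_examples


@dataclass
class ColumnStatistics:   # the "statistics" object of one column
    histogram: Histogram
    min: float
    max: float
    mean: float
    median: float
    std: float
    nan_count: int
    nan_proportion: float


@dataclass
class ColumnEntry:        # one element of statistics_string_text
    name: str             # e.g. "text", "abstract", "question"
    statistics: ColumnStatistics


@dataclass
class Row:
    dataset: str          # e.g. "dair-ai/emotion"
    config: str           # e.g. "split"
    split: str            # e.g. "train"
    num_examples: int
    statistics_string_text: List[ColumnEntry]
    partial: bool
```
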
dair-ai/emotion | config: split | split: train | num_examples: 16,000
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 7, 37, 67, 97, 127, 157, 187, 217, 247, 277, 300 ], "hist": [ 1833, 3789, 3611, 2623, 1804, 1048, 635, 353, 213, 91 ] }, "max": 300, "mean": 96.84581, "median": 86, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 55.90495 } } ]
partial: false
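
A quick consistency check on a row like the one above: the per-bin counts plus the NaN count should add up to num_examples, and there should be one more bin edge than bin. A minimal sketch, with the literal values copied from the dair-ai/emotion train row; the variable names are just for illustration.

```python
import json

# Statistics for the "text" column of dair-ai/emotion (config "split", split "train"),
# copied from the row above.
stats_json = """
[{"name": "text",
  "statistics": {
    "histogram": {"bin_edges": [7, 37, 67, 97, 127, 157, 187, 217, 247, 277, 300],
                  "hist": [1833, 3789, 3611, 2623, 1804, 1048, 635, 353, 213, 91]},
    "max": 300, "mean": 96.84581, "median": 86, "min": 7,
    "nan_count": 0, "nan_proportion": 0, "std": 55.90495}}]
"""

num_examples = 16_000
for col in json.loads(stats_json):
    stats = col["statistics"]
    edges, counts = stats["histogram"]["bin_edges"], stats["histogram"]["hist"]
    assert len(edges) == len(counts) + 1
    total = sum(counts) + stats["nan_count"]
    assert total == num_examples, f'{col["name"]}: {total} != {num_examples}'
    print(col["name"], "ok:", total, "examples across", len(counts), "bins")
```
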
dair-ai/emotion | config: split | split: validation | num_examples: 2,000
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 11, 40, 69, 98, 127, 156, 185, 214, 243, 272, 295 ], "hist": [ 293, 473, 415, 309, 228, 140, 66, 35, 29, 12 ] }, "max": 295, "mean": 95.3475, "median": 85, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 54.82376 } } ]
partial: false

dair-ai/emotion | config: split | split: test | num_examples: 2,000
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 14, 43, 72, 101, 130, 159, 188, 217, 246, 275, 296 ], "hist": [ 346, 466, 377, 308, 214, 144, 69, 45, 24, 7 ] }, "max": 296, "mean": 96.5865, "median": 86, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 55.71599 } } ]
partial: false
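
The histograms are coarse, but they are enough to approximate other quantiles of the length distribution without re-reading the split. A rough sketch that interpolates linearly within a bin, using the "text" histogram from the dair-ai/emotion test row above; the interpolation is an assumption, not something the table guarantees.

```python
from bisect import bisect_left
from itertools import accumulate


def approx_quantile(bin_edges, hist, q):
    """Estimate the q-th quantile from a histogram, interpolating linearly inside a bin."""
    total = sum(hist)
    target = q * total
    cumulative = list(accumulate(hist))
    i = bisect_left(cumulative, target)          # first bin whose cumulative count reaches target
    lo, hi = bin_edges[i], bin_edges[i + 1]
    prev = cumulative[i - 1] if i > 0 else 0
    frac = (target - prev) / hist[i] if hist[i] else 0.0
    return lo + frac * (hi - lo)


# Histogram of the "text" column from the dair-ai/emotion test row above.
bin_edges = [14, 43, 72, 101, 130, 159, 188, 217, 246, 275, 296]
hist = [346, 466, 377, 308, 214, 144, 69, 45, 24, 7]

print(round(approx_quantile(bin_edges, hist, 0.5), 1))  # roughly the reported median of 86
print(round(approx_quantile(bin_edges, hist, 0.9), 1))  # approximate 90th percentile
```
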
dair-ai/emotion | config: unsplit | split: train | num_examples: 416,809
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 2, 85, 168, 251, 334, 417, 500, 583, 666, 749, 830 ], "hist": [ 204631, 162639, 42259, 7256, 17, 4, 2, 0, 0, 1 ] }, "max": 830, "mean": 97.0284, "median": 86, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 56.19823 } } ]
partial: false

allenai/qasper | config: qasper | split: train | num_examples: 888
[ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 279, 454, 629, 804, 979, 1154, 1329, 1504, 1679, 1854, 2022 ], "hist": [ 16, 79, 151, 219, 193, 127, 57, 25, 17, 4 ] }, "max": 2022, "mean": 982.02703, "median": 959.5, "min": 279, "nan_count": 0, "nan_proportion": 0, "std": 294.62389 } }, { "name": "id", "statistics": { "histogram": { "bin_edges": [ 10, 10 ], "hist": [ 888 ] }, "max": 10, "mean": 10, "median": 10, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "title", "statistics": { "histogram": { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 156 ], "hist": [ 6, 43, 166, 247, 217, 136, 47, 19, 6, 1 ] }, "max": 156, "mean": 71.95495, "median": 70, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 20.95424 } } ]
partial: false
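
Because the bins are unevenly sized (the last one is clipped at the observed maximum), the counts plot more faithfully as bars with explicit widths than through a regular histogram call. A small matplotlib sketch using the "abstract" column of the allenai/qasper train row above; treating the values as character lengths is an assumption based on the ranges shown here.

```python
import matplotlib.pyplot as plt

# "abstract" column of allenai/qasper (config "qasper", split "train"), copied from the row above.
bin_edges = [279, 454, 629, 804, 979, 1154, 1329, 1504, 1679, 1854, 2022]
hist = [16, 79, 151, 219, 193, 127, 57, 25, 17, 4]

# One bar per bin, anchored at the left edge, with the true (uneven) bin width.
widths = [hi - lo for lo, hi in zip(bin_edges[:-1], bin_edges[1:])]
plt.bar(bin_edges[:-1], hist, width=widths, align="edge", edgecolor="black")
plt.xlabel("abstract length")
plt.ylabel("number of papers")
plt.title("allenai/qasper train: abstract length distribution")
plt.show()
```
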
allenai/qasper | config: qasper | split: validation | num_examples: 281
[ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 338, 492, 646, 800, 954, 1108, 1262, 1416, 1570, 1724, 1868 ], "hist": [ 9, 31, 47, 76, 40, 30, 28, 9, 6, 5 ] }, "max": 1868, "mean": 953.51957, "median": 909, "min": 338, "nan_count": 0, "nan_proportion": 0, "std": 299.11687 } }, { "name": "id", "statistics": { "histogram": { "bin_edges": [ 10, 10 ], "hist": [ 281 ] }, "max": 10, "mean": 10, "median": 10, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "title", "statistics": { "histogram": { "bin_edges": [ 19, 32, 45, 58, 71, 84, 97, 110, 123, 136, 142 ], "hist": [ 7, 18, 47, 78, 63, 31, 16, 16, 4, 1 ] }, "max": 142, "mean": 71.29181, "median": 68, "min": 19, "nan_count": 0, "nan_proportion": 0, "std": 21.77187 } } ]
partial: false

allenai/qasper | config: qasper | split: test | num_examples: 416
[ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 252, 418, 584, 750, 916, 1082, 1248, 1414, 1580, 1746, 1909 ], "hist": [ 16, 26, 67, 108, 77, 63, 33, 15, 8, 3 ] }, "max": 1909, "mean": 930.93029, "median": 901.5, "min": 252, "nan_count": 0, "nan_proportion": 0, "std": 295.71791 } }, { "name": "id", "statistics": { "histogram": { "bin_edges": [ 10, 10 ], "hist": [ 416 ] }, "max": 10, "mean": 10, "median": 10, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "title", "statistics": { "histogram": { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 153 ], "hist": [ 8, 28, 76, 111, 88, 65, 28, 9, 2, 1 ] }, "max": 153, "mean": 71.10337, "median": 69, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 22.801 } } ]
partial: false

llm-book/wrime-sentiment | config: default | split: test | num_examples: 1,781
[ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 82, 546, 856, 297 ] }, "max": 16, "mean": 14.76811, "median": 15, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 0.77721 } }, { "name": "sentence", "statistics": { "histogram": { "bin_edges": [ 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 154 ], "hist": [ 354, 435, 341, 215, 160, 104, 73, 45, 52, 2 ] }, "max": 154, "mean": 47.29927, "median": 39, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 33.27888 } } ]
partial: false

llm-book/wrime-sentiment | config: default | split: train | num_examples: 20,149
[ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 1201, 6486, 9866, 2596 ] }, "max": 16, "mean": 14.68773, "median": 15, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 0.76921 } }, { "name": "sentence", "statistics": { "histogram": { "bin_edges": [ 1, 19, 37, 55, 73, 91, 109, 127, 145, 163, 173 ], "hist": [ 5468, 6483, 3644, 1882, 1005, 623, 444, 541, 56, 3 ] }, "max": 173, "mean": 39.52747, "median": 30, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 30.78759 } } ]
partial: false

llm-book/wrime-sentiment | config: default | split: validation | num_examples: 1,608
[ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 140, 546, 735, 187 ] }, "max": 16, "mean": 14.60261, "median": 15, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 0.80411 } }, { "name": "sentence", "statistics": { "histogram": { "bin_edges": [ 3, 20, 37, 54, 71, 88, 105, 122, 139, 156, 163 ], "hist": [ 257, 374, 306, 227, 142, 113, 102, 54, 32, 1 ] }, "max": 163, "mean": 53.86754, "median": 46, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 34.72008 } } ]
partial: false

ttxy/resume_ner | config: default | split: test | num_examples: 477
[ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 9, 112, 215, 318, 421, 524, 627, 730, 833, 936, 1035 ], "hist": [ 222, 156, 44, 23, 13, 6, 8, 1, 2, 2 ] }, "max": 1035, "mean": 160.87631, "median": 115, "min": 9, "nan_count": 0, "nan_proportion": 0, "std": 155.29666 } }, { "name": "text", "statistics": { "histogram": { "bin_edges": [ 7, 40, 73, 106, 139, 172, 205, 238, 271, 304, 333 ], "hist": [ 179, 174, 61, 22, 19, 5, 7, 6, 1, 3 ] }, "max": 333, "mean": 62.31237, "median": 49, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 52.13344 } } ]
partial: false

ttxy/resume_ner | config: default | split: train | num_examples: 3,821
[ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 7, 122, 237, 352, 467, 582, 697, 812, 927, 1042, 1149 ], "hist": [ 1940, 1173, 329, 175, 89, 55, 35, 17, 7, 1 ] }, "max": 1149, "mean": 162.03612, "median": 121, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 149.21149 } }, { "name": "text", "statistics": { "histogram": { "bin_edges": [ 5, 41, 77, 113, 149, 185, 221, 257, 293, 329, 355 ], "hist": [ 1312, 1526, 480, 244, 109, 71, 37, 23, 15, 4 ] }, "max": 355, "mean": 63.95629, "median": 53, "min": 5, "nan_count": 0, "nan_proportion": 0, "std": 50.26945 } } ]
partial: false

ttxy/resume_ner | config: default | split: validation | num_examples: 463
[ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 9, 95, 181, 267, 353, 439, 525, 611, 697, 783, 859 ], "hist": [ 180, 184, 48, 14, 11, 12, 6, 6, 0, 2 ] }, "max": 859, "mean": 145.27214, "median": 109, "min": 9, "nan_count": 0, "nan_proportion": 0, "std": 129.88859 } }, { "name": "text", "statistics": { "histogram": { "bin_edges": [ 7, 42, 77, 112, 147, 182, 217, 252, 287, 322, 355 ], "hist": [ 184, 186, 49, 17, 15, 3, 5, 3, 0, 1 ] }, "max": 355, "mean": 59, "median": 49, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 45.65881 } } ]
partial: false

fedryanto/UnibQuADV2 | config: plain_text | split: train | num_examples: 4,010
[ { "name": "context", "statistics": { "histogram": { "bin_edges": [ 65, 444, 823, 1202, 1581, 1960, 2339, 2718, 3097, 3476, 3845 ], "hist": [ 797, 1184, 599, 594, 212, 148, 198, 162, 71, 45 ] }, "max": 3845, "mean": 1107.15387, "median": 827, "min": 65, "nan_count": 0, "nan_proportion": 0, "std": 816.68357 } }, { "name": "id", "statistics": { "histogram": { "bin_edges": [ 7, 7 ], "hist": [ 4010 ] }, "max": 7, "mean": 7, "median": 7, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 25, 39, 53, 67, 81, 95, 109, 123, 137, 147 ], "hist": [ 102, 1035, 1360, 906, 332, 174, 82, 16, 2, 1 ] }, "max": 147, "mean": 50.14988, "median": 47, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 17.43961 } } ]
partial: false

fedryanto/UnibQuADV2 | config: plain_text | split: validation | num_examples: 1,036
[ { "name": "context", "statistics": { "histogram": { "bin_edges": [ 73, 485, 897, 1309, 1721, 2133, 2545, 2957, 3369, 3781, 4188 ], "hist": [ 382, 322, 79, 63, 2, 68, 0, 1, 0, 119 ] }, "max": 4188, "mean": 1119.86873, "median": 620, "min": 73, "nan_count": 0, "nan_proportion": 0, "std": 1219.10519 } }, { "name": "id", "statistics": { "histogram": { "bin_edges": [ 7, 7 ], "hist": [ 1036 ] }, "max": 7, "mean": 7, "median": 7, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 24, 35, 46, 57, 68, 79, 90, 101, 112, 120 ], "hist": [ 12, 67, 397, 256, 168, 64, 41, 18, 10, 3 ] }, "max": 120, "mean": 50.65251, "median": 47, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 16.24953 } } ]
partial: false

redwoodresearch/generated_stories | config: default | split: train | num_examples: 3,825
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 3130, 3489, 3848, 4207, 4566, 4925, 5284, 5643, 6002, 6361, 6715 ], "hist": [ 32, 162, 301, 411, 1031, 1228, 508, 123, 25, 4 ] }, "max": 6715, "mean": 4849.41752, "median": 4919, "min": 3130, "nan_count": 0, "nan_proportion": 0, "std": 512.67936 } } ]
partial: false

redwoodresearch/generated_stories | config: default | split: validation | num_examples: 675
[ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 2838, 3220, 3602, 3984, 4366, 4748, 5130, 5512, 5894, 6276, 6655 ], "hist": [ 1, 8, 47, 54, 132, 231, 155, 40, 6, 1 ] }, "max": 6655, "mean": 4844.32, "median": 4915, "min": 2838, "nan_count": 0, "nan_proportion": 0, "std": 514.73498 } } ]
partial: false

ceval/ceval-exam | config: accountant | split: test | num_examples: 443
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 110 ], "hist": [ 212, 96, 77, 32, 11, 9, 3, 2, 0, 1 ] }, "max": 110, "mean": 17.36795, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.60652 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 104 ], "hist": [ 207, 95, 71, 41, 16, 7, 4, 1, 0, 1 ] }, "max": 104, "mean": 17.89391, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.71883 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 114 ], "hist": [ 212, 108, 70, 27, 14, 8, 2, 0, 1, 1 ] }, "max": 114, "mean": 18.18284, "median": 14, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 16.41089 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 94 ], "hist": [ 194, 97, 69, 42, 24, 9, 4, 3, 0, 1 ] }, "max": 94, "mean": 17.85553, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.44108 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 41, 68, 95, 122, 149, 176, 203, 230, 257, 280 ], "hist": [ 227, 82, 46, 42, 27, 14, 1, 2, 1, 1 ] }, "max": 280, "mean": 58.45372, "median": 39, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 42.39338 } } ]
partial: false
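
ceval/ceval-exam appears once per subject config in the rows that follow, so comparisons across subjects are easiest after a small aggregation step, for example tabulating the mean question length per config and split. A sketch under the assumption that rows have already been parsed into dictionaries with the fields listed at the top of this section; the two sample rows are transcribed from this table.

```python
from typing import Iterable, List, Tuple


def question_length_by_config(rows: Iterable[dict]) -> List[Tuple[str, str, float]]:
    """For ceval/ceval-exam rows, return (config, split, mean question length) tuples."""
    table = []
    for row in rows:
        if row["dataset"] != "ceval/ceval-exam":
            continue
        for col in row["statistics_string_text"]:
            if col["name"] == "question":
                table.append((row["config"], row["split"], col["statistics"]["mean"]))
    return sorted(table)


# Example with two rows transcribed from this section.
rows = [
    {"dataset": "ceval/ceval-exam", "config": "accountant", "split": "test",
     "statistics_string_text": [{"name": "question", "statistics": {"mean": 58.45372}}]},
    {"dataset": "ceval/ceval-exam", "config": "accountant", "split": "val",
     "statistics_string_text": [{"name": "question", "statistics": {"mean": 59.4898}}]},
]
print(question_length_by_config(rows))
# [('accountant', 'test', 58.45372), ('accountant', 'val', 59.4898)]
```
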
ceval/ceval-exam | config: accountant | split: val | num_examples: 49
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 26, 5, 6, 5, 1, 2, 2, 1, 0, 1 ] }, "max": 78, "mean": 16.63265, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 17.51986 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 12, 22, 32, 42, 52, 62, 72, 82, 92, 94 ], "hist": [ 28, 9, 5, 0, 3, 2, 0, 0, 1, 1 ] }, "max": 94, "mean": 17.79592, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 20.10077 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 64 ], "hist": [ 20, 7, 8, 2, 3, 3, 0, 4, 1, 1 ] }, "max": 64, "mean": 18.53061, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 17.97927 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 75 ], "hist": [ 22, 9, 5, 4, 3, 3, 2, 0, 0, 1 ] }, "max": 75, "mean": 17.46939, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 16.60585 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 17, 36, 55, 74, 93, 112, 131, 150, 169, 188, 206 ], "hist": [ 23, 8, 5, 3, 4, 2, 0, 1, 1, 2 ] }, "max": 206, "mean": 59.4898, "median": 38, "min": 17, "nan_count": 0, "nan_proportion": 0, "std": 48.18589 } } ]
partial: false

ceval/ceval-exam | config: advanced_mathematics | split: test | num_examples: 173
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 109, 23, 17, 14, 2, 4, 1, 1, 0, 2 ] }, "max": 215, "mean": 31.00578, "median": 18, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 35.88637 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 110, 22, 17, 14, 2, 4, 1, 1, 0, 2 ] }, "max": 215, "mean": 30.72254, "median": 18, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 36.1361 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 110, 22, 17, 14, 2, 4, 2, 0, 0, 2 ] }, "max": 215, "mean": 30.93642, "median": 18, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 35.80635 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 107, 26, 16, 14, 2, 4, 1, 1, 0, 2 ] }, "max": 215, "mean": 31.77457, "median": 18, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 35.47552 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 39, 80, 121, 162, 203, 244, 285, 326, 367, 408, 445 ], "hist": [ 55, 56, 36, 11, 6, 5, 2, 1, 0, 1 ] }, "max": 445, "mean": 113.3237, "median": 96, "min": 39, "nan_count": 0, "nan_proportion": 0, "std": 61.42863 } } ]
partial: false

ceval/ceval-exam | config: art_studies | split: test | num_examples: 298
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 204, 86, 6, 1, 0, 0, 0, 0, 1 ] }, "max": 35, "mean": 3.9396, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.61626 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 88, 113, 73, 14, 6, 1, 2, 1 ] }, "max": 15, "mean": 3.82886, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.01699 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 22 ], "hist": [ 156, 109, 25, 6, 1, 0, 0, 1 ] }, "max": 22, "mean": 3.97651, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.28804 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 17, 18 ], "hist": [ 74, 123, 61, 28, 8, 1, 2, 0, 1 ] }, "max": 18, "mean": 4.04027, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.23495 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 17, 24, 31, 38, 45, 52, 59, 66, 73, 79 ], "hist": [ 86, 108, 52, 29, 12, 5, 5, 0, 0, 1 ] }, "max": 79, "mean": 22.69128, "median": 20, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 9.73687 } } ]
partial: false

ceval/ceval-exam | config: art_studies | split: val | num_examples: 33
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 9, 7, 5, 4, 4, 2, 1 ] }, "max": 8, "mean": 3.81818, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 1.81064 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 9, 8, 2, 6, 4, 2, 1 ] }, "max": 8, "mean": 3.84848, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 1.83918 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10 ], "hist": [ 1, 9, 8, 5, 3, 4, 2, 0, 0, 1 ] }, "max": 10, "mean": 3.81818, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 1.9757 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 3, 4, 5, 6, 7, 7 ], "hist": [ 12, 9, 3, 5, 1, 3 ] }, "max": 7, "mean": 3.48485, "median": 3, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 1.62252 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 18, 24, 30, 36, 42, 48, 54, 60, 65 ], "hist": [ 12, 9, 6, 3, 1, 0, 1, 0, 1 ] }, "max": 65, "mean": 23.33333, "median": 21, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 11.14862 } } ]
partial: false

ceval/ceval-exam | config: basic_medicine | split: test | num_examples: 175
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 36, 79, 23, 13, 16, 4, 2, 0, 0, 2 ] }, "max": 29, "mean": 6.75429, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.64265 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 38, 66, 26, 27, 11, 6, 0, 0, 0, 1 ] }, "max": 28, "mean": 6.80571, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.29608 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 32, 64, 45, 8, 13, 7, 1, 4, 0, 1 ] }, "max": 28, "mean": 7.28571, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.89294 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 37 ], "hist": [ 83, 49, 21, 11, 6, 2, 1, 1, 1 ] }, "max": 37, "mean": 7.68571, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.57174 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 7, 13, 19, 25, 31, 37, 43, 49, 55, 61, 66 ], "hist": [ 10, 60, 66, 24, 10, 2, 0, 1, 1, 1 ] }, "max": 66, "mean": 21.19429, "median": 20, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 7.77167 } } ]
partial: false

ceval/ceval-exam | config: business_administration | split: test | num_examples: 301
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 180, 66, 21, 17, 6, 4, 4, 0, 3 ] }, "max": 44, "mean": 7.15282, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.09342 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 74 ], "hist": [ 237, 32, 22, 6, 1, 1, 0, 0, 1, 1 ] }, "max": 74, "mean": 7.37209, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.2487 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 82 ], "hist": [ 243, 32, 13, 9, 1, 0, 0, 2, 0, 1 ] }, "max": 82, "mean": 7.55482, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.21454 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 232, 41, 15, 7, 2, 0, 2, 1, 1 ] }, "max": 71, "mean": 7.49169, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.67779 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 38, 66, 94, 122, 150, 178, 206, 234, 262, 281 ], "hist": [ 153, 72, 21, 20, 15, 11, 5, 3, 0, 1 ] }, "max": 281, "mean": 55.6711, "median": 37, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 47.00718 } } ]
partial: false

ceval/ceval-exam | config: business_administration | split: val | num_examples: 33
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 46 ], "hist": [ 16, 9, 3, 2, 0, 1, 1, 0, 0, 1 ] }, "max": 46, "mean": 9.33333, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.84463 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 45 ], "hist": [ 16, 7, 2, 3, 2, 0, 1, 0, 2 ] }, "max": 45, "mean": 10.60606, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.99699 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 9, 16, 23, 30, 37, 44, 51, 58, 62 ], "hist": [ 19, 7, 2, 2, 0, 0, 1, 1, 1 ] }, "max": 62, "mean": 12.54545, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 15.13706 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 55 ], "hist": [ 22, 2, 3, 2, 1, 1, 1, 0, 1 ] }, "max": 55, "mean": 11.36364, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 12.36886 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 171 ], "hist": [ 14, 10, 2, 1, 2, 1, 1, 0, 1, 1 ] }, "max": 171, "mean": 45.66667, "median": 32, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 39.182 } } ]
partial: false

ceval/ceval-exam | config: chinese_language_and_literature | split: test | num_examples: 209
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 24 ], "hist": [ 105, 55, 15, 11, 10, 8, 1, 4 ] }, "max": 24, "mean": 5.35885, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.94785 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 133, 35, 20, 10, 6, 2, 1, 2 ] }, "max": 31, "mean": 5.51196, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.37812 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 142, 28, 15, 8, 7, 5, 1, 2, 0, 1 ] }, "max": 37, "mean": 5.65072, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.93373 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 152, 27, 13, 10, 4, 1, 1, 0, 1 ] }, "max": 41, "mean": 5.69378, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.80633 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 86 ], "hist": [ 34, 105, 34, 20, 10, 4, 0, 1, 0, 1 ] }, "max": 86, "mean": 23.27273, "median": 21, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 10.34767 } } ]
partial: false

ceval/ceval-exam | config: civil_servant | split: test | num_examples: 429
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 85 ], "hist": [ 217, 102, 59, 26, 8, 10, 4, 2, 0, 1 ] }, "max": 85, "mean": 13.5711, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.72876 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 193, 104, 58, 35, 16, 11, 4, 3, 3, 2 ] }, "max": 78, "mean": 14.18182, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.41831 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 186, 114, 55, 41, 14, 7, 5, 4, 2, 1 ] }, "max": 78, "mean": 13.89977, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.57377 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 69 ], "hist": [ 167, 94, 65, 50, 19, 13, 10, 3, 3, 5 ] }, "max": 69, "mean": 14.71329, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.27581 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 9, 40, 71, 102, 133, 164, 195, 226, 257, 288, 315 ], "hist": [ 175, 49, 56, 45, 42, 34, 13, 10, 4, 1 ] }, "max": 315, "mean": 81.8951, "median": 64, "min": 9, "nan_count": 0, "nan_proportion": 0, "std": 67.62804 } } ]
partial: false

ceval/ceval-exam | config: civil_servant | split: val | num_examples: 47
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 11, 10, 10, 6, 5, 2, 2, 1 ] }, "max": 32, "mean": 11.42553, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 7.52025 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 47, 50 ], "hist": [ 14, 11, 11, 7, 2, 1, 0, 0, 0, 1 ] }, "max": 50, "mean": 11.76596, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 8.84704 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 30 ], "hist": [ 10, 6, 6, 6, 10, 2, 3, 2, 0, 2 ] }, "max": 30, "mean": 11.59574, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 7.25816 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 12, 11, 6, 6, 6, 2, 2, 2 ] }, "max": 32, "mean": 12.23404, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 8.15932 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 41, 69, 97, 125, 153, 181, 209, 237, 265, 283 ], "hist": [ 16, 0, 5, 10, 6, 4, 2, 2, 0, 2 ] }, "max": 283, "mean": 100.97872, "median": 110, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 72.37508 } } ]
partial: false

ceval/ceval-exam | config: clinical_medicine | split: test | num_examples: 200
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 62, 77, 28, 13, 10, 3, 5, 2 ] }, "max": 32, "mean": 7.91, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.97498 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 58, 71, 31, 22, 8, 8, 0, 2 ] }, "max": 32, "mean": 7.97, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.65589 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 28, 59, 42, 30, 17, 14, 6, 2, 1, 1 ] }, "max": 30, "mean": 8.505, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.32369 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 26 ], "hist": [ 31, 57, 41, 28, 14, 19, 4, 5, 1 ] }, "max": 26, "mean": 8.63, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.40641 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 26, 44, 62, 80, 98, 116, 134, 152, 170, 180 ], "hist": [ 132, 31, 9, 9, 3, 5, 6, 3, 1, 1 ] }, "max": 180, "mean": 34.11, "median": 21, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 32.06664 } } ]
partial: false

ceval/ceval-exam | config: college_economics | split: test | num_examples: 497
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 49 ], "hist": [ 156, 149, 99, 47, 29, 10, 2, 3, 1, 1 ] }, "max": 49, "mean": 10.09256, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.23368 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 42 ], "hist": [ 147, 154, 103, 45, 26, 10, 8, 3, 1 ] }, "max": 42, "mean": 10.38028, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.30487 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 147, 140, 100, 52, 30, 14, 8, 5, 1 ] }, "max": 41, "mean": 10.68813, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.68596 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 51 ], "hist": [ 212, 141, 74, 40, 19, 7, 3, 0, 1 ] }, "max": 51, "mean": 10.23742, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.48385 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 31, 54, 77, 100, 123, 146, 169, 192, 215, 235 ], "hist": [ 293, 122, 39, 19, 17, 4, 1, 0, 0, 2 ] }, "max": 235, "mean": 36.10664, "median": 27, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 27.96453 } } ]
partial: false

ceval/ceval-exam | config: college_economics | split: val | num_examples: 55
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 15, 20, 8, 4, 2, 4, 1, 0, 1 ] }, "max": 44, "mean": 11.03636, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.91724 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 47 ], "hist": [ 16, 15, 11, 7, 5, 0, 0, 0, 0, 1 ] }, "max": 47, "mean": 11.12727, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.15374 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 18, 13, 12, 4, 5, 2, 0, 0, 1 ] }, "max": 44, "mean": 10.89091, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.17556 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 5, 16, 9, 7, 6, 5, 3, 0, 2, 2 ] }, "max": 30, "mean": 10.50909, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.20821 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 26, 42, 58, 74, 90, 106, 122, 138, 154, 168 ], "hist": [ 25, 17, 3, 4, 1, 1, 1, 1, 0, 2 ] }, "max": 168, "mean": 39.29091, "median": 27, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 34.53108 } } ]
partial: false

ceval/ceval-exam | config: college_chemistry | split: test | num_examples: 224
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 107 ], "hist": [ 139, 53, 21, 5, 3, 0, 0, 0, 1, 2 ] }, "max": 107, "mean": 12.625, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.43782 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 15, 29, 43, 57, 71, 85, 99, 113, 127, 136 ], "hist": [ 162, 50, 6, 3, 1, 0, 1, 0, 0, 1 ] }, "max": 136, "mean": 12.34375, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.86943 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 96 ], "hist": [ 128, 64, 17, 8, 2, 2, 1, 1, 0, 1 ] }, "max": 96, "mean": 12.29464, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.32056 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 113 ], "hist": [ 144, 46, 21, 8, 2, 0, 0, 2, 0, 1 ] }, "max": 113, "mean": 13.39286, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.52529 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 7, 69, 131, 193, 255, 317, 379, 441, 503, 565, 618 ], "hist": [ 183, 22, 11, 6, 1, 0, 0, 0, 0, 1 ] }, "max": 618, "mean": 51.97768, "median": 29, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 63.57965 } } ]
partial: false

ceval/ceval-exam | config: college_programming | split: test | num_examples: 342
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 64 ], "hist": [ 161, 77, 45, 20, 19, 7, 5, 5, 2, 1 ] }, "max": 64, "mean": 12.52632, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.25382 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 80 ], "hist": [ 172, 69, 47, 25, 18, 2, 7, 1, 0, 1 ] }, "max": 80, "mean": 12.84795, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.63613 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 156, 63, 56, 32, 15, 12, 2, 4, 0, 2 ] }, "max": 67, "mean": 12.85673, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.85271 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 70 ], "hist": [ 150, 83, 42, 28, 23, 9, 1, 4, 1, 1 ] }, "max": 70, "mean": 12.83041, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.76731 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 67, 123, 179, 235, 291, 347, 403, 459, 515, 570 ], "hist": [ 260, 41, 19, 10, 2, 5, 2, 1, 1, 1 ] }, "max": 570, "mean": 60.11111, "median": 35, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 73.69593 } } ]
partial: false

ceval/ceval-exam | config: college_programming | split: val | num_examples: 37
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 57 ], "hist": [ 16, 10, 3, 4, 1, 0, 0, 0, 1, 2 ] }, "max": 57, "mean": 12.67568, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.44925 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 17, 8, 5, 3, 1, 0, 0, 0, 2, 1 ] }, "max": 55, "mean": 12.54054, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.56178 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 16, 9, 5, 4, 0, 0, 0, 1, 1, 1 ] }, "max": 55, "mean": 12.02703, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.32393 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 16, 8, 5, 4, 2, 0, 0, 0, 1, 1 ] }, "max": 55, "mean": 11.89189, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.75579 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 49, 86, 123, 160, 197, 234, 271, 308, 345, 376 ], "hist": [ 24, 8, 1, 0, 0, 1, 0, 0, 1, 2 ] }, "max": 376, "mean": 67.89189, "median": 34, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 93.3416 } } ]
partial: false

ceval/ceval-exam | config: college_physics | split: test | num_examples: 176
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 76 ], "hist": [ 77, 56, 16, 12, 5, 2, 1, 4, 2, 1 ] }, "max": 76, "mean": 13.80682, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.73201 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 116 ], "hist": [ 106, 43, 14, 3, 5, 3, 1, 0, 0, 1 ] }, "max": 116, "mean": 15.14205, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.8279 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 83 ], "hist": [ 81, 48, 18, 15, 3, 4, 0, 4, 2, 1 ] }, "max": 83, "mean": 15.96023, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.7289 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 93 ], "hist": [ 93, 47, 12, 13, 4, 3, 2, 1, 0, 1 ] }, "max": 93, "mean": 15.01705, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.36602 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 34, 55, 76, 97, 118, 139, 160, 181, 202, 219 ], "hist": [ 24, 44, 48, 27, 22, 6, 3, 0, 1, 1 ] }, "max": 219, "mean": 68.11932, "median": 65, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 32.88582 } } ]
partial: false

ceval/ceval-exam | config: computer_network | split: test | num_examples: 171
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 81, 49, 20, 9, 4, 3, 1, 3, 1 ] }, "max": 36, "mean": 6.74854, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.31254 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 87, 38, 19, 10, 8, 3, 1, 2, 2, 1 ] }, "max": 37, "mean": 7.17544, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.0747 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 83, 37, 25, 14, 6, 3, 0, 0, 3 ] }, "max": 36, "mean": 6.9883, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.16726 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 78, 42, 20, 17, 7, 4, 0, 0, 3 ] }, "max": 36, "mean": 7.36257, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.28421 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 9, 25, 41, 57, 73, 89, 105, 121, 137, 153, 163 ], "hist": [ 48, 63, 23, 10, 11, 5, 4, 4, 2, 1 ] }, "max": 163, "mean": 42.72515, "median": 31, "min": 9, "nan_count": 0, "nan_proportion": 0, "std": 30.39738 } } ]
partial: false

ceval/ceval-exam | config: computer_architecture | split: test | num_examples: 193
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 67, 51, 28, 19, 10, 10, 2, 3, 3 ] }, "max": 35, "mean": 8.93264, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.06301 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 34 ], "hist": [ 71, 48, 24, 21, 12, 10, 1, 4, 2 ] }, "max": 34, "mean": 8.86528, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.94941 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 64, 54, 29, 11, 14, 12, 3, 4, 2 ] }, "max": 35, "mean": 9.3057, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.41724 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 65, 57, 22, 15, 17, 11, 4, 2 ] }, "max": 32, "mean": 8.94301, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.82 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 9, 21, 33, 45, 57, 69, 81, 93, 105, 117, 127 ], "hist": [ 49, 84, 29, 9, 10, 4, 3, 1, 2, 2 ] }, "max": 127, "mean": 32.72021, "median": 26, "min": 9, "nan_count": 0, "nan_proportion": 0, "std": 20.25191 } } ]
partial: false

ceval/ceval-exam | config: education_science | split: test | num_examples: 270
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 52 ], "hist": [ 207, 36, 18, 7, 0, 0, 1, 0, 1 ] }, "max": 52, "mean": 6.54815, "median": 4.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.63607 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 160, 60, 25, 8, 10, 2, 3, 1, 0, 1 ] }, "max": 38, "mean": 6.68519, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.37039 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 163, 50, 29, 9, 11, 4, 0, 0, 3, 1 ] }, "max": 38, "mean": 6.93333, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.86743 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 162, 57, 22, 12, 5, 5, 3, 0, 3, 1 ] }, "max": 38, "mean": 6.95185, "median": 4.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.16814 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 34, 60, 86, 112, 138, 164, 190, 216, 242, 267 ], "hist": [ 168, 80, 16, 2, 2, 0, 1, 0, 0, 1 ] }, "max": 267, "mean": 34.2037, "median": 28, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 23.82946 } } ]
partial: false

ceval/ceval-exam | config: discrete_mathematics | split: test | num_examples: 153
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 112 ], "hist": [ 88, 19, 21, 8, 4, 6, 2, 1, 3, 1 ] }, "max": 112, "mean": 19.80392, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 23.661 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 14, 27, 40, 53, 66, 79, 92, 105, 118, 126 ], "hist": [ 83, 29, 16, 10, 3, 5, 2, 0, 2, 3 ] }, "max": 126, "mean": 21.30065, "median": 12, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 26.00306 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 16, 31, 46, 61, 76, 91, 106, 121, 136, 147 ], "hist": [ 90, 26, 14, 11, 3, 3, 2, 2, 1, 1 ] }, "max": 147, "mean": 22.04575, "median": 12, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 26.95399 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 21, 41, 61, 81, 101, 121, 141, 161, 181, 197 ], "hist": [ 100, 28, 13, 3, 5, 2, 1, 0, 0, 1 ] }, "max": 197, "mean": 22.15686, "median": 12, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 28.69603 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 39, 68, 97, 126, 155, 184, 213, 242, 271, 291 ], "hist": [ 85, 37, 17, 6, 3, 2, 0, 2, 0, 1 ] }, "max": 291, "mean": 47.29412, "median": 36, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 43.5281 } } ]
partial: false

ceval/ceval-exam | config: electrical_engineer | split: test | num_examples: 339
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 236, 68, 12, 9, 6, 4, 2, 1, 1 ] }, "max": 61, "mean": 7.64012, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.53536 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 212, 82, 19, 7, 7, 6, 4, 1, 0, 1 ] }, "max": 55, "mean": 7.71386, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.9932 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 235, 63, 18, 11, 7, 3, 1, 0, 1 ] }, "max": 61, "mean": 7.75516, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.85382 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 199, 88, 26, 11, 5, 4, 2, 3, 0, 1 ] }, "max": 59, "mean": 7.9233, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.89989 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 26, 39, 52, 65, 78, 91, 104, 117, 130, 137 ], "hist": [ 54, 97, 89, 47, 23, 14, 9, 2, 3, 1 ] }, "max": 137, "mean": 44.87021, "median": 41, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 21.49008 } } ]
partial: false

ceval/ceval-exam | config: electrical_engineer | split: val | num_examples: 37
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 25 ], "hist": [ 10, 16, 5, 2, 2, 0, 0, 1, 1 ] }, "max": 25, "mean": 6.2973, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.17937 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 48 ], "hist": [ 22, 10, 2, 1, 1, 0, 0, 0, 0, 1 ] }, "max": 48, "mean": 7.27027, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.2045 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 52 ], "hist": [ 22, 11, 1, 0, 1, 0, 1, 0, 1 ] }, "max": 52, "mean": 8.18919, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.06876 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 23, 10, 1, 0, 1, 0, 1, 0, 1 ] }, "max": 53, "mean": 8.32432, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.1326 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 22, 30, 38, 46, 54, 62, 70, 78, 86, 92 ], "hist": [ 3, 10, 6, 3, 2, 3, 4, 2, 2, 2 ] }, "max": 92, "mean": 44.72973, "median": 36, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 22.21692 } } ]
partial: false

ceval/ceval-exam | config: fire_engineer | split: test | num_examples: 282
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 66 ], "hist": [ 170, 28, 30, 22, 8, 10, 5, 6, 2, 1 ] }, "max": 66, "mean": 11.60638, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.62948 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 65 ], "hist": [ 170, 32, 23, 18, 13, 16, 6, 3, 0, 1 ] }, "max": 65, "mean": 11.53546, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.11408 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 77 ], "hist": [ 177, 36, 16, 22, 15, 5, 7, 1, 2, 1 ] }, "max": 77, "mean": 12.10284, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.68095 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 66 ], "hist": [ 169, 31, 28, 17, 15, 13, 4, 1, 3, 1 ] }, "max": 66, "mean": 11.6383, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.17783 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 16, 28, 40, 52, 64, 76, 88, 100, 112, 124, 127 ], "hist": [ 56, 52, 48, 42, 40, 22, 11, 7, 3, 1 ] }, "max": 127, "mean": 50.85816, "median": 46.5, "min": 16, "nan_count": 0, "nan_proportion": 0, "std": 23.89213 } } ]
partial: false

ceval/ceval-exam | config: fire_engineer | split: val | num_examples: 31
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 17, 5, 7, 0, 1, 0, 0, 0, 1 ] }, "max": 71, "mean": 12, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.30618 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 66 ], "hist": [ 15, 6, 2, 2, 0, 2, 2, 0, 0, 2 ] }, "max": 66, "mean": 15.96774, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 18.96397 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 64 ], "hist": [ 16, 4, 2, 4, 1, 1, 0, 1, 1, 1 ] }, "max": 64, "mean": 15.32258, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 17.66803 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 74 ], "hist": [ 15, 7, 1, 4, 2, 1, 0, 0, 0, 1 ] }, "max": 74, "mean": 15.09677, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 17.15101 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 23, 32, 41, 50, 59, 68, 77, 86, 95, 104, 105 ], "hist": [ 10, 5, 6, 1, 2, 2, 2, 1, 1, 1 ] }, "max": 105, "mean": 48.41935, "median": 41, "min": 23, "nan_count": 0, "nan_proportion": 0, "std": 24.033 } } ]
partial: false

ceval/ceval-exam | config: high_school_chinese | split: test | num_examples: 178
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 13, 24, 35, 46, 57, 68, 79, 90, 101, 110 ], "hist": [ 28, 14, 19, 43, 29, 26, 12, 3, 3, 1 ] }, "max": 110, "mean": 40.69101, "median": 42, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 22.61043 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 12, 22, 32, 42, 52, 62, 72, 82, 92, 93 ], "hist": [ 28, 10, 16, 37, 30, 29, 22, 4, 1, 1 ] }, "max": 93, "mean": 39.5618, "median": 41, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 21.04231 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 11, 20, 29, 38, 47, 56, 65, 74, 83, 89 ], "hist": [ 26, 11, 16, 26, 19, 35, 24, 13, 5, 3 ] }, "max": 89, "mean": 39.27528, "median": 40, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 21.50568 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 12, 22, 32, 42, 52, 62, 72, 82, 92, 93 ], "hist": [ 25, 13, 24, 23, 33, 36, 14, 7, 2, 1 ] }, "max": 93, "mean": 39.92697, "median": 43.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 21.388 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 40, 67, 94, 121, 148, 175, 202, 229, 256, 281 ], "hist": [ 125, 6, 16, 9, 4, 9, 4, 2, 1, 2 ] }, "max": 281, "mean": 50.34831, "median": 19, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 56.91883 } } ]
partial: false

ceval/ceval-exam | config: high_school_biology | split: test | num_examples: 175
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 47, 49 ], "hist": [ 60, 31, 27, 17, 20, 8, 9, 2, 0, 1 ] }, "max": 49, "mean": 13.22286, "median": 10, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 9.79746 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 40 ], "hist": [ 49, 30, 20, 14, 18, 13, 15, 9, 4, 3 ] }, "max": 40, "mean": 13.96, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 10.20613 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 9, 16, 23, 30, 37, 44, 51, 58, 65, 69 ], "hist": [ 72, 36, 22, 27, 14, 2, 1, 0, 0, 1 ] }, "max": 69, "mean": 14.18286, "median": 11, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 11.18603 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 50, 37, 15, 27, 13, 20, 7, 4, 2 ] }, "max": 41, "mean": 14.06857, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.49279 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 32, 53, 74, 95, 116, 137, 158, 179, 200, 215 ], "hist": [ 83, 38, 17, 15, 9, 4, 3, 3, 1, 2 ] }, "max": 215, "mean": 49.73714, "median": 34, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 39.91629 } } ]
partial: false

ceval/ceval-exam | config: environmental_impact_assessment_engineer | split: test | num_examples: 281
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 150, 62, 34, 13, 3, 7, 8, 3, 0, 1 ] }, "max": 55, "mean": 9.879, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.6551 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 95 ], "hist": [ 192, 49, 20, 11, 4, 2, 1, 1, 0, 1 ] }, "max": 95, "mean": 10.77936, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.12028 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 130, 72, 34, 18, 8, 8, 4, 2, 5 ] }, "max": 53, "mean": 10.84342, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.41275 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 14, 27, 40, 53, 66, 79, 92, 105, 118, 124 ], "hist": [ 204, 54, 14, 4, 2, 2, 0, 0, 0, 1 ] }, "max": 124, "mean": 11.29181, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.92313 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 13, 27, 41, 55, 69, 83, 97, 111, 125, 139, 150 ], "hist": [ 43, 40, 74, 51, 44, 13, 9, 4, 2, 1 ] }, "max": 150, "mean": 54.00356, "median": 51, "min": 13, "nan_count": 0, "nan_proportion": 0, "std": 24.50925 } } ]
partial: false

ceval/ceval-exam | config: environmental_impact_assessment_engineer | split: val | num_examples: 31
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 8, 10, 6, 0, 3, 1, 0, 0, 1, 2 ] }, "max": 30, "mean": 8.32258, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.70016 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 10, 8, 8, 2, 0, 1, 0, 2 ] }, "max": 31, "mean": 8.83871, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.54143 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 12, 8, 3, 2, 3, 0, 1, 2 ] }, "max": 31, "mean": 9.3871, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.47615 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 11, 9, 4, 2, 1, 1, 2, 1 ] }, "max": 31, "mean": 9.16129, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.43839 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 23, 32, 41, 50, 59, 68, 77, 86, 95, 104, 108 ], "hist": [ 4, 3, 4, 5, 8, 0, 2, 2, 2, 1 ] }, "max": 108, "mean": 58.25806, "median": 58, "min": 23, "nan_count": 0, "nan_proportion": 0, "std": 23.5074 } } ]
partial: false

ceval/ceval-exam | config: high_school_geography | split: test | num_examples: 178
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 54, 48, 31, 22, 15, 2, 1, 5 ] }, "max": 31, "mean": 8.91011, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.43725 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 65, 35, 32, 22, 12, 8, 2, 1, 0, 1 ] }, "max": 38, "mean": 9.58427, "median": 8, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.78118 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 61, 45, 30, 15, 13, 10, 1, 3 ] }, "max": 32, "mean": 9.50562, "median": 7, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.84307 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 36 ], "hist": [ 59, 47, 26, 18, 11, 9, 4, 2, 2 ] }, "max": 36, "mean": 9.9382, "median": 7, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 7.45166 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 167 ], "hist": [ 120, 35, 2, 4, 5, 5, 2, 0, 3, 2 ] }, "max": 167, "mean": 32.29213, "median": 23, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 29.01517 } } ]
partial: false

ceval/ceval-exam | config: high_school_history | split: test | num_examples: 182
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 50, 55, 57, 15, 3, 0, 1, 1 ] }, "max": 32, "mean": 8.71429, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.49379 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 34 ], "hist": [ 47, 57, 54, 18, 4, 1, 0, 0, 1 ] }, "max": 34, "mean": 8.81319, "median": 8, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.73826 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 22 ], "hist": [ 38, 30, 56, 34, 15, 5, 4 ] }, "max": 22, "mean": 8.86813, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.36639 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 38, 30, 45, 41, 15, 8, 3, 1, 1 ] }, "max": 27, "mean": 9.19231, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.74561 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 27, 40, 53, 66, 79, 92, 105, 118, 131, 140 ], "hist": [ 32, 31, 19, 17, 16, 24, 24, 8, 5, 6 ] }, "max": 140, "mean": 62.46154, "median": 60, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 33.81549 } } ]
partial: false

ceval/ceval-exam | config: high_school_chemistry | split: test | num_examples: 172
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 62 ], "hist": [ 63, 40, 33, 19, 5, 3, 5, 2, 2 ] }, "max": 62, "mean": 14.15698, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.16475 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 72 ], "hist": [ 64, 44, 29, 14, 13, 5, 0, 1, 2 ] }, "max": 72, "mean": 15.5, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.97524 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 65 ], "hist": [ 62, 34, 24, 24, 11, 10, 3, 1, 2, 1 ] }, "max": 65, "mean": 16.15698, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.44922 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 110 ], "hist": [ 81, 44, 31, 8, 3, 2, 2, 0, 0, 1 ] }, "max": 110, "mean": 16.68023, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.55549 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 37, 63, 89, 115, 141, 167, 193, 219, 245, 267 ], "hist": [ 105, 26, 16, 11, 7, 3, 1, 0, 2, 1 ] }, "max": 267, "mean": 46.6686, "median": 28.5, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 42.76565 } } ]
partial: false

ceval/ceval-exam | config: high_school_mathematics | split: test | num_examples: 166
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 60 ], "hist": [ 72, 27, 27, 19, 9, 5, 3, 0, 2, 2 ] }, "max": 60, "mean": 11.83735, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.75884 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 71, 34, 25, 17, 8, 5, 1, 4, 1 ] }, "max": 61, "mean": 13.66867, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.73144 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 80, 39, 23, 15, 4, 2, 1, 1, 1 ] }, "max": 71, "mean": 12.36145, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.06706 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 86 ], "hist": [ 75, 45, 26, 12, 2, 3, 1, 0, 0, 2 ] }, "max": 86, "mean": 13.51205, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.6545 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 15, 57, 99, 141, 183, 225, 267, 309, 351, 393, 431 ], "hist": [ 36, 81, 24, 15, 2, 4, 2, 1, 0, 1 ] }, "max": 431, "mean": 94.46386, "median": 77, "min": 15, "nan_count": 0, "nan_proportion": 0, "std": 60.04439 } } ]
partial: false

ceval/ceval-exam | config: high_school_physics | split: test | num_examples: 175
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 48, 44, 40, 24, 15, 1, 2, 0, 0, 1 ] }, "max": 67, "mean": 15.33143, "median": 14, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.4091 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 111 ], "hist": [ 65, 79, 25, 4, 1, 0, 0, 0, 0, 1 ] }, "max": 111, "mean": 16.41714, "median": 15, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.0689 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 43, 37, 46, 25, 12, 5, 2, 0, 2, 3 ] }, "max": 67, "mean": 17.26857, "median": 16, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.45645 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 72 ], "hist": [ 44, 36, 52, 19, 12, 7, 2, 0, 3 ] }, "max": 72, "mean": 18.81143, "median": 18, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.08565 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 32, 52, 72, 92, 112, 132, 152, 172, 192, 211 ], "hist": [ 64, 36, 30, 20, 8, 3, 5, 3, 3, 3 ] }, "max": 211, "mean": 56.33714, "median": 46, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 43.1796 } } ]
false
ceval/ceval-exam
high_school_politics
test
176
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 116, 30, 18, 8, 3, 0, 0, 0, 1 ] }, "max": 61, "mean": 7.43182, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.0602 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 52 ], "hist": [ 105, 34, 19, 14, 2, 1, 0, 0, 1 ] }, "max": 52, "mean": 7.22159, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.51241 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 11, 20, 29, 38, 47, 56, 65, 74, 83, 89 ], "hist": [ 129, 33, 9, 4, 0, 0, 0, 0, 0, 1 ] }, "max": 89, "mean": 7.74432, "median": 3, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 9.50834 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 54 ], "hist": [ 110, 25, 19, 14, 3, 4, 0, 0, 1 ] }, "max": 54, "mean": 8.07386, "median": 3.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 8.61893 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 46, 80, 114, 148, 182, 216, 250, 284, 318, 350 ], "hist": [ 23, 24, 33, 28, 27, 27, 9, 3, 1, 1 ] }, "max": 350, "mean": 126.57386, "median": 121, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 65.34152 } } ]
false
ceval/ceval-exam
ideological_and_moral_cultivation
test
172
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 21, 63, 35, 20, 12, 7, 5, 3, 2, 4 ] }, "max": 30, "mean": 8.31395, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.17344 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 23, 61, 25, 24, 16, 10, 6, 4, 2, 1 ] }, "max": 29, "mean": 8.42442, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.87791 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 19, 62, 29, 19, 14, 13, 5, 7, 2, 2 ] }, "max": 30, "mean": 8.9186, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.40032 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 21, 61, 25, 18, 18, 15, 8, 1, 4, 1 ] }, "max": 28, "mean": 8.88953, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.19229 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 119 ], "hist": [ 79, 55, 20, 11, 5, 1, 0, 0, 0, 1 ] }, "max": 119, "mean": 26.26163, "median": 23, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 14.02154 } } ]
false
ceval/ceval-exam
law
test
221
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 66, 131, 196, 261, 326, 391, 456, 521, 586, 643 ], "hist": [ 219, 1, 0, 0, 0, 0, 0, 0, 0, 1 ] }, "max": 643, "mean": 16.08597, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 43.90596 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 72 ], "hist": [ 86, 70, 37, 14, 4, 4, 1, 2, 3 ] }, "max": 72, "mean": 13.8009, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.11183 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 77 ], "hist": [ 77, 67, 42, 19, 5, 5, 4, 1, 0, 1 ] }, "max": 77, "mean": 15.04525, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.01695 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 79, 66, 39, 21, 10, 3, 2, 0, 1 ] }, "max": 71, "mean": 14.53394, "median": 12, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.2789 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 33, 54, 75, 96, 117, 138, 159, 180, 201, 219 ], "hist": [ 95, 37, 27, 24, 20, 7, 6, 2, 2, 1 ] }, "max": 219, "mean": 55.33937, "median": 38, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 40.24287 } } ]
false
ceval/ceval-exam
legal_professional
test
215
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 15, 28, 41, 54, 67, 80, 93, 106, 119, 130 ], "hist": [ 46, 87, 47, 20, 7, 3, 1, 3, 0, 1 ] }, "max": 130, "mean": 27.08837, "median": 24, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 18.87407 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 15, 28, 41, 54, 67, 80, 93, 106, 119, 126 ], "hist": [ 47, 75, 53, 22, 9, 5, 2, 1, 0, 1 ] }, "max": 126, "mean": 27.89302, "median": 24, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 18.17549 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 16, 30, 44, 58, 72, 86, 100, 114, 128, 135 ], "hist": [ 57, 78, 43, 26, 6, 1, 3, 0, 0, 1 ] }, "max": 135, "mean": 27.92093, "median": 24, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 18.83421 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 17, 32, 47, 62, 77, 92, 107, 122, 137, 151 ], "hist": [ 48, 90, 43, 24, 4, 2, 3, 0, 0, 1 ] }, "max": 151, "mean": 29.71628, "median": 25, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 20.11877 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 41, 68, 95, 122, 149, 176, 203, 230, 257, 278 ], "hist": [ 87, 30, 37, 29, 13, 7, 7, 2, 2, 1 ] }, "max": 278, "mean": 71.54419, "median": 60, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 51.78989 } } ]
false
ceval/ceval-exam
logic
test
204
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 3, 12, 21, 30, 39, 48, 57, 66, 75, 84, 90 ], "hist": [ 36, 61, 55, 28, 12, 4, 4, 3, 0, 1 ] }, "max": 90, "mean": 23.30392, "median": 21, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 14.14826 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 3, 15, 27, 39, 51, 63, 75, 87, 99, 111, 114 ], "hist": [ 50, 85, 40, 16, 11, 0, 1, 0, 0, 1 ] }, "max": 114, "mean": 24.06373, "median": 21, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 15.00807 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 4, 16, 28, 40, 52, 64, 76, 88, 100, 112, 120 ], "hist": [ 56, 80, 41, 16, 8, 1, 1, 0, 0, 1 ] }, "max": 120, "mean": 24.62255, "median": 23, "min": 4, "nan_count": 0, "nan_proportion": 0, "std": 15.4827 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 4, 13, 22, 31, 40, 49, 58, 67, 76, 84 ], "hist": [ 43, 58, 51, 26, 14, 3, 5, 3, 1 ] }, "max": 84, "mean": 24.20098, "median": 22, "min": 4, "nan_count": 0, "nan_proportion": 0, "std": 14.56508 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 35, 66, 97, 128, 159, 190, 221, 252, 283, 314, 342 ], "hist": [ 14, 33, 51, 47, 28, 15, 9, 4, 0, 3 ] }, "max": 342, "mean": 136.08333, "median": 128.5, "min": 35, "nan_count": 0, "nan_proportion": 0, "std": 55.41965 } } ]
false
ceval/ceval-exam
mao_zedong_thought
test
219
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 53, 66, 41, 28, 19, 6, 2, 1, 3 ] }, "max": 33, "mean": 9.6758, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.21702 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 66, 82, 39, 11, 11, 6, 2, 2 ] }, "max": 33, "mean": 8.89498, "median": 8, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.61313 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 51, 35, 59, 24, 22, 11, 7, 5, 4, 1 ] }, "max": 31, "mean": 9.74886, "median": 8, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.85575 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 41, 59, 48, 31, 23, 6, 7, 2, 1, 1 ] }, "max": 38, "mean": 10.70776, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.77533 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 171 ], "hist": [ 112, 48, 18, 13, 13, 2, 6, 1, 4, 2 ] }, "max": 171, "mean": 39.80822, "median": 27, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 31.43086 } } ]
false
ceval/ceval-exam
marxism
test
179
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 19, 35, 62, 31, 15, 4, 5, 7, 1 ] }, "max": 27, "mean": 9.98324, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.93039 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 30 ], "hist": [ 22, 29, 48, 45, 14, 9, 9, 1, 1, 1 ] }, "max": 30, "mean": 10.32961, "median": 10, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.97158 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 35 ], "hist": [ 28, 60, 51, 21, 10, 5, 2, 1, 1 ] }, "max": 35, "mean": 10.63128, "median": 10, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.57062 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 34 ], "hist": [ 29, 48, 55, 27, 10, 8, 1, 0, 1 ] }, "max": 34, "mean": 10.97207, "median": 10, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.42718 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 7, 16, 25, 34, 43, 52, 61, 70, 79, 88, 92 ], "hist": [ 49, 87, 22, 7, 6, 2, 2, 1, 2, 1 ] }, "max": 92, "mean": 22.13966, "median": 18, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 14.05637 } } ]
false
ceval/ceval-exam
metrology_engineer
test
219
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 132, 49, 16, 11, 5, 2, 3, 0, 0, 1 ] }, "max": 59, "mean": 8.19635, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.14281 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 49 ], "hist": [ 109, 61, 19, 14, 5, 4, 5, 1, 0, 1 ] }, "max": 49, "mean": 8.26941, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.5956 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 50 ], "hist": [ 108, 56, 19, 16, 7, 8, 3, 0, 1, 1 ] }, "max": 50, "mean": 8.71233, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.01743 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 45 ], "hist": [ 108, 66, 15, 10, 8, 4, 5, 1, 2 ] }, "max": 45, "mean": 8.30594, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.88638 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 19, 30, 41, 52, 63, 74, 85, 96, 107, 113 ], "hist": [ 33, 75, 43, 29, 18, 13, 4, 2, 1, 1 ] }, "max": 113, "mean": 35.24201, "median": 30, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 18.25107 } } ]
false
ceval/ceval-exam
middle_school_biology
test
192
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 70, 37, 21, 19, 25, 12, 4, 4 ] }, "max": 32, "mean": 9.75521, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.55694 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 67, 37, 18, 23, 24, 11, 8, 3, 0, 1 ] }, "max": 37, "mean": 10.28125, "median": 7.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.92068 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 73, 44, 26, 26, 12, 8, 2, 0, 1 ] }, "max": 44, "mean": 10.23438, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.08931 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 70, 36, 20, 21, 16, 16, 8, 2, 2, 1 ] }, "max": 37, "mean": 10.36979, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.22929 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 21, 30, 39, 48, 57, 66, 75, 84, 93, 93 ], "hist": [ 41, 60, 36, 14, 17, 10, 7, 4, 2, 1 ] }, "max": 93, "mean": 34.32292, "median": 29, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 17.48627 } } ]
false
ceval/ceval-exam
middle_school_chemistry
test
185
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 71, 52, 33, 17, 5, 3, 0, 0, 2, 2 ] }, "max": 59, "mean": 11.1027, "median": 9, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.58969 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 65 ], "hist": [ 72, 53, 33, 17, 5, 2, 2, 0, 0, 1 ] }, "max": 65, "mean": 12.30811, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.95245 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 71, 46, 38, 16, 8, 2, 0, 3, 1 ] }, "max": 63, "mean": 12.8973, "median": 12, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.77866 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 109 ], "hist": [ 93, 55, 25, 7, 3, 0, 1, 0, 0, 1 ] }, "max": 109, "mean": 14.37297, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.58333 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 152 ], "hist": [ 88, 49, 24, 10, 3, 2, 6, 1, 0, 2 ] }, "max": 152, "mean": 35.47568, "median": 27, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 25.15389 } } ]
false
ceval/ceval-exam
middle_school_geography
test
108
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 44, 19, 13, 14, 8, 2, 1, 3, 1, 3 ] }, "max": 31, "mean": 8.46296, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.78016 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 46, 27, 8, 14, 7, 4, 0, 2 ] }, "max": 33, "mean": 8.7963, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.7613 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 39 ], "hist": [ 49, 21, 11, 13, 1, 8, 1, 3, 0, 1 ] }, "max": 39, "mean": 9.47222, "median": 6.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 7.89115 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 41 ], "hist": [ 46, 23, 15, 8, 3, 7, 2, 2, 0, 2 ] }, "max": 41, "mean": 9.74074, "median": 6.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 8.37148 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 19, 30, 41, 52, 63, 74, 85, 96, 107, 111 ], "hist": [ 38, 41, 13, 3, 6, 2, 1, 0, 1, 3 ] }, "max": 111, "mean": 28.83333, "median": 22, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 20.26991 } } ]
false
ceval/ceval-exam
middle_school_history
test
207
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 26 ], "hist": [ 68, 67, 37, 14, 13, 4, 2, 1, 1 ] }, "max": 26, "mean": 6.05314, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.39352 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 100, 47, 30, 15, 6, 5, 1, 2, 1 ] }, "max": 26, "mean": 6.2657, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.56793 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 98, 47, 33, 14, 7, 1, 2, 1, 4 ] }, "max": 28, "mean": 6.37198, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.98169 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 89, 54, 28, 17, 11, 3, 2, 1, 2 ] }, "max": 27, "mean": 6.58454, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.7143 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 167 ], "hist": [ 74, 53, 25, 23, 10, 10, 9, 2, 0, 1 ] }, "max": 167, "mean": 44.62802, "median": 35, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 29.5027 } } ]
false
ceval/ceval-exam
middle_school_mathematics
test
177
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 95, 42, 21, 5, 11, 2, 0, 0, 1 ] }, "max": 61, "mean": 9.80226, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.86732 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 62 ], "hist": [ 94, 36, 24, 7, 10, 3, 2, 0, 1 ] }, "max": 62, "mean": 10.44068, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.75175 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 90, 40, 21, 9, 10, 2, 2, 2, 1 ] }, "max": 61, "mean": 10.9435, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.02565 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 89, 35, 24, 10, 10, 5, 1, 1, 2 ] }, "max": 63, "mean": 11.75141, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 11.87172 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 31, 52, 73, 94, 115, 136, 157, 178, 199, 218 ], "hist": [ 65, 47, 34, 15, 6, 5, 2, 2, 0, 1 ] }, "max": 218, "mean": 48.07345, "median": 40, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 33.69449 } } ]
false
ceval/ceval-exam
middle_school_politics
test
193
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 101, 16, 18, 25, 19, 5, 1, 4, 4 ] }, "max": 26, "mean": 7.41969, "median": 4, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.19938 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 100, 18, 15, 24, 23, 6, 3, 3, 1 ] }, "max": 26, "mean": 7.36788, "median": 4, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.92526 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 29 ], "hist": [ 100, 19, 14, 24, 21, 8, 3, 2, 1, 1 ] }, "max": 29, "mean": 7.43005, "median": 4, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.10981 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 93, 18, 20, 22, 23, 8, 4, 3, 2 ] }, "max": 28, "mean": 7.77202, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.25748 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 34, 56, 78, 100, 122, 144, 166, 188, 210, 231 ], "hist": [ 17, 29, 52, 28, 17, 16, 16, 9, 5, 4 ] }, "max": 231, "mean": 90.70984, "median": 77, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 48.86726 } } ]
false
ceval/ceval-exam
middle_school_physics
test
178
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 44, 20, 33, 33, 28, 14, 4, 0, 2 ] }, "max": 36, "mean": 11.67978, "median": 11.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.4039 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 40, 20, 25, 40, 24, 19, 4, 4, 2 ] }, "max": 33, "mean": 12.51124, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.71032 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 41, 21, 24, 33, 22, 19, 13, 2, 3 ] }, "max": 35, "mean": 12.91573, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.24681 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 40, 22, 20, 26, 31, 23, 8, 5, 3 ] }, "max": 35, "mean": 13.46067, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 8.44264 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 158 ], "hist": [ 99, 31, 22, 10, 10, 3, 2, 0, 0, 1 ] }, "max": 158, "mean": 34.33146, "median": 24, "min": 12, "nan_count": 0, "nan_proportion": 0, "std": 23.42429 } } ]
false
ceval/ceval-exam
modern_chinese_history
test
212
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 57, 64, 48, 20, 8, 6, 4, 3, 1, 1 ] }, "max": 38, "mean": 9.91038, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.63657 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 41 ], "hist": [ 57, 61, 50, 20, 13, 5, 3, 1, 0, 2 ] }, "max": 41, "mean": 9.86321, "median": 9, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.4877 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 40 ], "hist": [ 66, 62, 37, 27, 8, 4, 3, 1, 1, 3 ] }, "max": 40, "mean": 9.64151, "median": 8, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.81953 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 37 ], "hist": [ 62, 57, 51, 20, 8, 8, 0, 4, 2 ] }, "max": 37, "mean": 9.83491, "median": 8.5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.50382 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 24, 34, 44, 54, 64, 74, 84, 94, 104, 113 ], "hist": [ 65, 75, 27, 14, 15, 6, 6, 1, 1, 2 ] }, "max": 113, "mean": 34.3066, "median": 29, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 18.0325 } } ]
false
ceval/ceval-exam
operating_system
test
179
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 88, 51, 16, 16, 3, 4, 0, 0, 1 ] }, "max": 33, "mean": 6.35754, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.10074 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 35, 84, 19, 19, 12, 6, 2, 0, 0, 2 ] }, "max": 30, "mean": 6.55866, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.82278 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 83, 49, 23, 18, 2, 1, 2, 1 ] }, "max": 32, "mean": 6.60894, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.97557 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 82, 47, 21, 19, 7, 0, 1, 1, 1 ] }, "max": 33, "mean": 6.87709, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.40978 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 21, 32, 43, 54, 65, 76, 87, 98, 109, 115 ], "hist": [ 34, 81, 45, 11, 5, 1, 1, 0, 0, 1 ] }, "max": 115, "mean": 29.95531, "median": 27, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 12.77718 } } ]
false
ceval/ceval-exam
physician
test
443
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 259, 120, 51, 7, 3, 1, 1, 0, 1 ] }, "max": 53, "mean": 7.39278, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.42027 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 136, 159, 80, 38, 14, 4, 7, 1, 1, 3 ] }, "max": 38, "mean": 7.84424, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.6649 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 68 ], "hist": [ 273, 127, 29, 6, 3, 3, 0, 1, 0, 1 ] }, "max": 68, "mean": 8.03837, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.4808 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 48 ], "hist": [ 182, 151, 65, 22, 14, 4, 1, 1, 1, 2 ] }, "max": 48, "mean": 8.14673, "median": 6, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.26299 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 7, 31, 55, 79, 103, 127, 151, 175, 199, 223, 242 ], "hist": [ 332, 48, 26, 17, 12, 6, 1, 0, 0, 1 ] }, "max": 242, "mean": 31.91874, "median": 21, "min": 7, "nan_count": 0, "nan_proportion": 0, "std": 28.78745 } } ]
false
ceval/ceval-exam
physician
val
49
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 19 ], "hist": [ 10, 14, 9, 3, 2, 8, 2, 0, 1 ] }, "max": 19, "mean": 6.81633, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.08592 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 19 ], "hist": [ 6, 21, 6, 7, 2, 3, 3, 0, 1 ] }, "max": 19, "mean": 6.63265, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 3.81168 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 20 ], "hist": [ 5, 16, 10, 6, 4, 2, 4, 0, 1, 1 ] }, "max": 20, "mean": 7.30612, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.20914 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 23, 16, 3, 5, 0, 0, 1, 1 ] }, "max": 33, "mean": 7.59184, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.00319 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 16, 24, 32, 40, 48, 56, 64, 72, 78 ], "hist": [ 8, 21, 11, 2, 3, 1, 2, 0, 1 ] }, "max": 78, "mean": 25.2449, "median": 20, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 14.109 } } ]
false
ceval/ceval-exam
plant_protection
test
199
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 27 ], "hist": [ 86, 74, 19, 12, 6, 1, 0, 0, 1 ] }, "max": 27, "mean": 4.78894, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 3.43712 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 82, 74, 25, 10, 5, 1, 1, 0, 0, 1 ] }, "max": 29, "mean": 4.95477, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 3.61436 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 27 ], "hist": [ 74, 89, 19, 7, 4, 5, 0, 0, 1 ] }, "max": 27, "mean": 4.97487, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 3.58369 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 104, 60, 22, 8, 3, 1, 0, 1 ] }, "max": 32, "mean": 5.58291, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 4.08272 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 103 ], "hist": [ 48, 76, 46, 21, 7, 0, 0, 0, 0, 1 ] }, "max": 103, "mean": 28.01005, "median": 26, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 11.53256 } } ]
false
ceval/ceval-exam
probability_and_statistics
test
166
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 206 ], "hist": [ 101, 34, 10, 9, 4, 3, 3, 1, 0, 1 ] }, "max": 206, "mean": 27.25301, "median": 13, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 34.34331 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 204 ], "hist": [ 98, 39, 11, 6, 5, 3, 3, 0, 0, 1 ] }, "max": 204, "mean": 27.07831, "median": 14.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 32.53697 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 204 ], "hist": [ 100, 30, 14, 7, 7, 5, 2, 0, 0, 1 ] }, "max": 204, "mean": 28.38554, "median": 15.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 33.86849 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 203 ], "hist": [ 103, 28, 16, 6, 4, 5, 1, 1, 1, 1 ] }, "max": 203, "mean": 27.61446, "median": 14.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 34.59509 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 16, 65, 114, 163, 212, 261, 310, 359, 408, 457, 503 ], "hist": [ 45, 48, 28, 25, 7, 7, 1, 2, 1, 2 ] }, "max": 503, "mean": 125.56024, "median": 99, "min": 16, "nan_count": 0, "nan_proportion": 0, "std": 88.57754 } } ]
false
ceval/ceval-exam
sports_science
test
180
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 69, 56, 26, 12, 7, 4, 2, 1, 3 ] }, "max": 28, "mean": 6.92222, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.99212 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 67, 54, 33, 10, 8, 3, 2, 2, 1 ] }, "max": 26, "mean": 6.96111, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.53954 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 84, 65, 16, 6, 4, 2, 2, 1 ] }, "max": 33, "mean": 7.08333, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 5.04491 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 68, 52, 31, 12, 8, 2, 3, 3, 1 ] }, "max": 28, "mean": 7.03889, "median": 6, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 4.80963 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 15, 22, 29, 36, 43, 50, 57, 64, 71, 74 ], "hist": [ 15, 67, 47, 20, 12, 10, 3, 5, 0, 1 ] }, "max": 74, "mean": 25.58889, "median": 22, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 11.30669 } } ]
false
ceval/ceval-exam
professional_tour_guide
test
266
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 21 ], "hist": [ 139, 95, 14, 10, 6, 0, 2 ] }, "max": 21, "mean": 4.16917, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.95717 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 21 ], "hist": [ 135, 97, 16, 10, 6, 1, 1 ] }, "max": 21, "mean": 4.15038, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.90087 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 21 ], "hist": [ 141, 88, 17, 11, 6, 2, 1 ] }, "max": 21, "mean": 4.26316, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.97891 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 25 ], "hist": [ 135, 103, 10, 12, 5, 0, 0, 0, 1 ] }, "max": 25, "mean": 4.16541, "median": 3, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 2.84089 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 17, 23, 29, 35, 41, 47, 53, 59, 65, 65 ], "hist": [ 39, 73, 53, 32, 26, 16, 11, 9, 6, 1 ] }, "max": 65, "mean": 27.80827, "median": 25, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 12.38155 } } ]
false
ceval/ceval-exam
tax_accountant
test
443
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 81 ], "hist": [ 219, 92, 57, 36, 16, 15, 4, 2, 2 ] }, "max": 81, "mean": 15.00903, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.20375 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 70 ], "hist": [ 184, 75, 65, 43, 24, 25, 8, 10, 6, 3 ] }, "max": 70, "mean": 15.82619, "median": 10, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.46306 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 95 ], "hist": [ 215, 91, 64, 35, 15, 17, 3, 1, 1, 1 ] }, "max": 95, "mean": 16.43115, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.45294 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 89 ], "hist": [ 210, 72, 74, 38, 23, 14, 7, 2, 2, 1 ] }, "max": 89, "mean": 16.45147, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 15.15931 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 17, 46, 75, 104, 133, 162, 191, 220, 249, 278, 299 ], "hist": [ 245, 56, 45, 39, 22, 15, 9, 4, 5, 3 ] }, "max": 299, "mean": 67.89842, "median": 40, "min": 17, "nan_count": 0, "nan_proportion": 0, "std": 57.17007 } } ]
false
ceval/ceval-exam
tax_accountant
val
49
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 58 ], "hist": [ 13, 14, 7, 2, 1, 3, 1, 4, 2, 2 ] }, "max": 58, "mean": 18.36735, "median": 11, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 16.67045 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 3, 8, 13, 18, 23, 28, 33, 38, 43, 46 ], "hist": [ 16, 8, 6, 2, 2, 9, 4, 1, 1 ] }, "max": 46, "mean": 16.83673, "median": 13, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 12.24804 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 3, 11, 19, 27, 35, 43, 51, 59, 67, 74 ], "hist": [ 20, 9, 9, 4, 2, 4, 0, 0, 1 ] }, "max": 74, "mean": 18.63265, "median": 16, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 15.33745 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 56, 57 ], "hist": [ 14, 8, 8, 4, 6, 4, 0, 1, 3, 1 ] }, "max": 57, "mean": 19.30612, "median": 18, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 14.83128 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 17, 40, 63, 86, 109, 132, 155, 178, 201, 224, 240 ], "hist": [ 29, 4, 7, 3, 3, 0, 2, 0, 0, 1 ] }, "max": 240, "mean": 54.5102, "median": 28, "min": 17, "nan_count": 0, "nan_proportion": 0, "std": 47.50926 } } ]
false
ceval/ceval-exam
teacher_qualification
test
399
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 206, 101, 35, 32, 13, 8, 2, 1, 0, 1 ] }, "max": 38, "mean": 6.53885, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.26703 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 203, 105, 34, 31, 14, 7, 3, 0, 1, 1 ] }, "max": 38, "mean": 6.6792, "median": 4, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.48065 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 39 ], "hist": [ 188, 114, 36, 32, 13, 6, 3, 3, 1, 3 ] }, "max": 39, "mean": 7.01754, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.11508 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 197, 101, 31, 38, 16, 10, 4, 2 ] }, "max": 32, "mean": 6.96742, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.62536 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 10, 33, 56, 79, 102, 125, 148, 171, 194, 217, 237 ], "hist": [ 96, 124, 110, 47, 19, 0, 2, 0, 0, 1 ] }, "max": 237, "mean": 55.20802, "median": 52, "min": 10, "nan_count": 0, "nan_proportion": 0, "std": 27.91335 } } ]
false
ceval/ceval-exam
teacher_qualification
val
44
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 25, 7, 5, 3, 3, 0, 0, 1 ] }, "max": 32, "mean": 7.79545, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.37429 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 19, 13, 2, 3, 3, 1, 1, 0, 1, 1 ] }, "max": 31, "mean": 7.5, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 6.35207 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 39 ], "hist": [ 24, 11, 2, 3, 2, 0, 0, 0, 1, 1 ] }, "max": 39, "mean": 7.93182, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 7.68386 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 41 ], "hist": [ 24, 11, 2, 1, 2, 1, 1, 1, 0, 1 ] }, "max": 41, "mean": 8.52273, "median": 5, "min": 2, "nan_count": 0, "nan_proportion": 0, "std": 8.49484 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 16, 27, 38, 49, 60, 71, 82, 93, 104, 115, 124 ], "hist": [ 7, 8, 5, 5, 5, 9, 3, 0, 0, 2 ] }, "max": 124, "mean": 53.79545, "median": 51.5, "min": 16, "nan_count": 0, "nan_proportion": 0, "std": 25.8084 } } ]
false
ceval/ceval-exam
urban_and_rural_planner
test
418
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 56 ], "hist": [ 194, 109, 44, 29, 19, 10, 5, 4, 3, 1 ] }, "max": 56, "mean": 10.69378, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 9.91895 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 209, 93, 53, 28, 15, 10, 7, 2, 1 ] }, "max": 61, "mean": 11.43541, "median": 7.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 10.6459 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 108 ], "hist": [ 273, 84, 31, 16, 6, 3, 3, 1, 0, 1 ] }, "max": 108, "mean": 12.25598, "median": 7, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.32688 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 99 ], "hist": [ 262, 83, 39, 15, 8, 6, 2, 1, 0, 2 ] }, "max": 99, "mean": 12.38517, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.20553 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 11, 20, 29, 38, 47, 56, 65, 74, 83, 92, 100 ], "hist": [ 48, 120, 87, 79, 33, 23, 10, 5, 10, 3 ] }, "max": 100, "mean": 35.88517, "median": 32, "min": 11, "nan_count": 0, "nan_proportion": 0, "std": 16.65091 } } ]
false
ceval/ceval-exam
urban_and_rural_planner
val
46
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 21, 14, 4, 0, 2, 1, 2, 1, 1 ] }, "max": 63, "mean": 13.23913, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 14.73044 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 22, 11, 2, 2, 3, 1, 4, 0, 1 ] }, "max": 53, "mean": 12.97826, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 13.1174 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 57 ], "hist": [ 21, 9, 5, 3, 5, 1, 1, 0, 0, 1 ] }, "max": 57, "mean": 12.47826, "median": 7.5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 12.12479 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 99 ], "hist": [ 29, 6, 6, 1, 2, 1, 0, 0, 0, 1 ] }, "max": 99, "mean": 14.84783, "median": 8, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 17.91829 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 14, 20, 26, 32, 38, 44, 50, 56, 62, 68, 68 ], "hist": [ 7, 9, 11, 4, 4, 5, 4, 1, 0, 1 ] }, "max": 68, "mean": 32.26087, "median": 29, "min": 14, "nan_count": 0, "nan_proportion": 0, "std": 13.12239 } } ]
false
ceval/ceval-exam
veterinary_medicine
test
210
[ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 39 ], "hist": [ 92, 53, 26, 23, 10, 3, 1, 0, 1, 1 ] }, "max": 39, "mean": 7.50476, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.90535 } }, { "name": "B", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 126, 46, 21, 9, 4, 3, 0, 0, 0, 1 ] }, "max": 55, "mean": 7.83333, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 7.04978 } }, { "name": "C", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 34 ], "hist": [ 91, 59, 28, 16, 6, 9, 0, 0, 1 ] }, "max": 34, "mean": 7.27619, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 5.59048 } }, { "name": "D", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 84, 60, 20, 19, 11, 6, 6, 3, 1 ] }, "max": 35, "mean": 8.21905, "median": 5, "min": 1, "nan_count": 0, "nan_proportion": 0, "std": 6.82099 } }, { "name": "question", "statistics": { "histogram": { "bin_edges": [ 8, 22, 36, 50, 64, 78, 92, 106, 120, 134, 145 ], "hist": [ 106, 72, 9, 4, 5, 2, 2, 3, 4, 3 ] }, "max": 145, "mean": 29.59524, "median": 21, "min": 8, "nan_count": 0, "nan_proportion": 0, "std": 26.20937 } } ]
false
THUDM/LongBench
2wikimqa
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 2847, 9813, 16779, 23745, 30711, 37677, 44643, 51609, 58575, 65541, 72498 ], "hist": [ 10, 18, 57, 53, 18, 8, 10, 10, 10, 6 ] }, "max": 72498, "mean": 29615.005, "median": 25488.5, "min": 2847, "nan_count": 0, "nan_proportion": 0, "std": 15333.49641 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 33, 42, 51, 60, 69, 78, 87, 96, 105, 114, 119 ], "hist": [ 8, 16, 47, 45, 37, 24, 15, 4, 2, 2 ] }, "max": 119, "mean": 66.99, "median": 66, "min": 33, "nan_count": 0, "nan_proportion": 0, "std": 15.8631 } } ]
false
THUDM/LongBench
dureader
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 7965, 9875, 11785, 13695, 15605, 17515, 19425, 21335, 23245, 25155, 27064 ], "hist": [ 12, 21, 30, 44, 30, 25, 24, 7, 5, 2 ] }, "max": 27064, "mean": 15641.86, "median": 15175, "min": 7965, "nan_count": 0, "nan_proportion": 0, "std": 3907.43879 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 3, 5, 7, 9, 11, 13, 15, 17, 19, 20 ], "hist": [ 4, 27, 44, 66, 21, 18, 13, 3, 4 ] }, "max": 20, "mean": 9.735, "median": 9, "min": 3, "nan_count": 0, "nan_proportion": 0, "std": 3.25696 } } ]
false
THUDM/LongBench
gov_report
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 11102, 35704, 60306, 84908, 109510, 134112, 158714, 183316, 207918, 232520, 257121 ], "hist": [ 65, 74, 34, 17, 4, 3, 0, 2, 0, 1 ] }, "max": 257121, "mean": 53924.245, "median": 46358, "min": 11102, "nan_count": 0, "nan_proportion": 0, "std": 34390.20179 } } ]
false
THUDM/LongBench
hotpotqa
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 6558, 14033, 21508, 28983, 36458, 43933, 51408, 58883, 66358, 73833, 81304 ], "hist": [ 3, 8, 13, 6, 15, 24, 17, 33, 58, 23 ] }, "max": 81304, "mean": 56549.57, "median": 62495, "min": 6558, "nan_count": 0, "nan_proportion": 0, "std": 17858.75958 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 33, 53, 73, 93, 113, 133, 153, 173, 193, 213, 231 ], "hist": [ 23, 42, 58, 39, 20, 10, 5, 1, 0, 2 ] }, "max": 231, "mean": 88.13, "median": 83, "min": 33, "nan_count": 0, "nan_proportion": 0, "std": 32.44172 } } ]
false
THUDM/LongBench
lcc
test
500
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 500 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 4160, 14151, 24142, 34133, 44124, 54115, 64106, 74097, 84088, 94079, 104060 ], "hist": [ 371, 80, 26, 10, 1, 4, 5, 1, 1, 1 ] }, "max": 104060, "mean": 13616.114, "median": 9793.5, "min": 4160, "nan_count": 0, "nan_proportion": 0, "std": 11548.26867 } } ]
false
THUDM/LongBench
lsht
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 5576, 9407, 13238, 17069, 20900, 24731, 28562, 32393, 36224, 40055, 43878 ], "hist": [ 10, 26, 31, 37, 24, 28, 20, 15, 7, 2 ] }, "max": 43878, "mean": 21503.64, "median": 20330.5, "min": 5576, "nan_count": 0, "nan_proportion": 0, "std": 8476.18219 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 34, 548, 1062, 1576, 2090, 2604, 3118, 3632, 4146, 4660, 5170 ], "hist": [ 90, 69, 18, 11, 7, 2, 1, 1, 0, 1 ] }, "max": 5170, "mean": 827.98, "median": 581, "min": 34, "nan_count": 0, "nan_proportion": 0, "std": 693.14043 } } ]
false
THUDM/LongBench
multifieldqa_en
test
150
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 150 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 4493, 10456, 16419, 22382, 28345, 34308, 40271, 46234, 52197, 58160, 64118 ], "hist": [ 18, 24, 19, 9, 15, 23, 24, 11, 3, 4 ] }, "max": 64118, "mean": 28947.90667, "median": 31376, "min": 4493, "nan_count": 0, "nan_proportion": 0, "std": 15101.49024 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 15, 28, 41, 54, 67, 80, 93, 106, 119, 132, 144 ], "hist": [ 2, 17, 39, 35, 34, 11, 6, 3, 2, 1 ] }, "max": 144, "mean": 61.82667, "median": 60, "min": 15, "nan_count": 0, "nan_proportion": 0, "std": 21.00295 } } ]
false
THUDM/LongBench
multifieldqa_zh
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 1233, 2784, 4335, 5886, 7437, 8988, 10539, 12090, 13641, 15192, 16733 ], "hist": [ 20, 47, 31, 22, 27, 27, 7, 11, 5, 3 ] }, "max": 16733, "mean": 6664.23, "median": 5990, "min": 1233, "nan_count": 0, "nan_proportion": 0, "std": 3546.50516 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 6, 10, 14, 18, 22, 26, 30, 34, 38, 40 ], "hist": [ 9, 45, 63, 41, 25, 8, 4, 3, 2 ] }, "max": 40, "mean": 17.495, "median": 16, "min": 6, "nan_count": 0, "nan_proportion": 0, "std": 5.96219 } } ]
false
THUDM/LongBench
musique
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 22958, 28852, 34746, 40640, 46534, 52428, 58322, 64216, 70110, 76004, 81895 ], "hist": [ 3, 0, 1, 4, 2, 7, 9, 44, 114, 16 ] }, "max": 81895, "mean": 69404.945, "median": 71602.5, "min": 22958, "nan_count": 0, "nan_proportion": 0, "std": 8738.45007 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, 187 ], "hist": [ 19, 38, 55, 35, 20, 13, 8, 9, 2, 1 ] }, "max": 187, "mean": 85.485, "median": 79, "min": 35, "nan_count": 0, "nan_proportion": 0, "std": 30.44593 } } ]
false
THUDM/LongBench
narrativeqa
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 15, 27, 39, 51, 63, 75, 87, 99, 111, 123, 125 ], "hist": [ 21, 50, 47, 40, 21, 9, 7, 4, 0, 1 ] }, "max": 125, "mean": 48.715, "median": 46.5, "min": 15, "nan_count": 0, "nan_proportion": 0, "std": 20.12183 } } ]
false
THUDM/LongBench
passage_count
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 21562, 32050, 42538, 53026, 63514, 74002, 84490, 94978, 105466, 115954, 126439 ], "hist": [ 9, 26, 19, 33, 32, 39, 23, 14, 4, 1 ] }, "max": 126439, "mean": 66816.91, "median": 68701.5, "min": 21562, "nan_count": 0, "nan_proportion": 0, "std": 21194.60814 } } ]
false
THUDM/LongBench
passage_retrieval_en
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 43906, 46288, 48670, 51052, 53434, 55816, 58198, 60580, 62962, 65344, 67723 ], "hist": [ 6, 15, 20, 39, 34, 28, 31, 18, 5, 4 ] }, "max": 67723, "mean": 55088.595, "median": 54633.5, "min": 43906, "nan_count": 0, "nan_proportion": 0, "std": 4854.31606 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 260, 410, 560, 710, 860, 1010, 1160, 1310, 1460, 1610, 1758 ], "hist": [ 9, 16, 51, 42, 38, 21, 9, 7, 5, 2 ] }, "max": 1758, "mean": 832.015, "median": 786, "min": 260, "nan_count": 0, "nan_proportion": 0, "std": 278.67848 } } ]
false
THUDM/LongBench
passage_retrieval_zh
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 4865, 5254, 5643, 6032, 6421, 6810, 7199, 7588, 7977, 8366, 8748 ], "hist": [ 5, 15, 29, 37, 41, 28, 26, 8, 7, 4 ] }, "max": 8748, "mean": 6597.51, "median": 6549, "min": 4865, "nan_count": 0, "nan_proportion": 0, "std": 767.00789 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 17, 123, 229, 335, 441, 547, 653, 759, 865, 971, 1073 ], "hist": [ 87, 92, 15, 5, 0, 0, 0, 0, 0, 1 ] }, "max": 1073, "mean": 146.64, "median": 130, "min": 17, "nan_count": 0, "nan_proportion": 0, "std": 92.99744 } } ]
false
THUDM/LongBench
qasper
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 9022, 18223, 27424, 36625, 45826, 55027, 64228, 73429, 82630, 91831, 101028 ], "hist": [ 71, 77, 38, 7, 2, 1, 1, 0, 1, 2 ] }, "max": 101028, "mean": 23640.54, "median": 22335.5, "min": 9022, "nan_count": 0, "nan_proportion": 0, "std": 12434.89217 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 22, 36, 50, 64, 78, 92, 106, 120, 134, 148, 152 ], "hist": [ 46, 86, 34, 17, 10, 4, 1, 1, 0, 1 ] }, "max": 152, "mean": 47.79, "median": 43, "min": 22, "nan_count": 0, "nan_proportion": 0, "std": 19.13286 } } ]
false
THUDM/LongBench
qmsum
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 9978, 23328, 36678, 50028, 63378, 76728, 90078, 103428, 116778, 130128, 143473 ], "hist": [ 12, 35, 34, 67, 20, 10, 4, 4, 9, 5 ] }, "max": 143473, "mean": 57460.025, "median": 56387, "min": 9978, "nan_count": 0, "nan_proportion": 0, "std": 27535.98213 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 21, 39, 57, 75, 93, 111, 129, 147, 165, 183, 195 ], "hist": [ 27, 44, 45, 28, 24, 18, 7, 6, 0, 1 ] }, "max": 195, "mean": 74.055, "median": 66.5, "min": 21, "nan_count": 0, "nan_proportion": 0, "std": 34.43225 } } ]
false
THUDM/LongBench
repobench-p
test
500
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 500 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 4442, 24885, 45328, 65771, 86214, 106657, 127100, 147543, 167986, 188429, 208864 ], "hist": [ 164, 140, 95, 60, 18, 11, 10, 1, 0, 1 ] }, "max": 208864, "mean": 43583.2, "median": 37740.5, "min": 4442, "nan_count": 0, "nan_proportion": 0, "std": 29880.03926 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 474, 2351, 4228, 6105, 7982, 9859, 11736, 13613, 15490, 17367, 19234 ], "hist": [ 103, 125, 165, 80, 18, 6, 1, 0, 1, 1 ] }, "max": 19234, "mean": 4488.134, "median": 4450.5, "min": 474, "nan_count": 0, "nan_proportion": 0, "std": 2292.30418 } } ]
false
THUDM/LongBench
trec
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 8329, 12518, 16707, 20896, 25085, 29274, 33463, 37652, 41841, 46030, 50212 ], "hist": [ 21, 19, 17, 18, 22, 17, 19, 21, 22, 24 ] }, "max": 50212, "mean": 29948.42, "median": 29984.5, "min": 8329, "nan_count": 0, "nan_proportion": 0, "std": 12400.3286 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 30, 37, 44, 51, 58, 65, 72, 79, 86, 93, 93 ], "hist": [ 25, 35, 42, 47, 23, 15, 4, 6, 2, 1 ] }, "max": 93, "mean": 51.245, "median": 49.5, "min": 30, "nan_count": 0, "nan_proportion": 0, "std": 12.68382 } } ]
false
THUDM/LongBench
triviaqa
test
200
[ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } }, { "name": "context", "statistics": { "histogram": { "bin_edges": [ 5518, 15110, 24702, 34294, 43886, 53478, 63070, 72662, 82254, 91846, 101431 ], "hist": [ 27, 15, 24, 22, 32, 21, 21, 21, 15, 2 ] }, "max": 101431, "mean": 47122.875, "median": 48040, "min": 5518, "nan_count": 0, "nan_proportion": 0, "std": 24597.77103 } }, { "name": "input", "statistics": { "histogram": { "bin_edges": [ 349, 1032, 1715, 2398, 3081, 3764, 4447, 5130, 5813, 6496, 7172 ], "hist": [ 30, 25, 25, 33, 14, 14, 21, 26, 10, 2 ] }, "max": 7172, "mean": 3067.845, "median": 2718.5, "min": 349, "nan_count": 0, "nan_proportion": 0, "std": 1784.39271 } } ]
false