Column schema and observed value ranges (for string-typed columns, the range refers to string lengths):

| column | dtype | min | max |
|---|---|---|---|
| dataset | string (length) | 4 | 115 |
| config | string (length) | 1 | 121 |
| split | string (length) | 1 | 228 |
| num_examples | int64 | 3 | 341M |
| column_name | string (length) | 1 | 22.7k |
| min | int64 | 0 | 1.81M |
| max | int64 | 0 | 981M |
| mean | float64 | 0 | 42.2M |
| median | float64 | 0 | 24M |
| std | float64 | 0 | 84.2M |
| histogram | dict | – | – |
| partial | bool (2 classes) | – | – |
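Each data row below reports per-column statistics for one (dataset, config, split, column_name) combination; the values are consistent with character-count string lengths. As an illustration only, here is a minimal sketch of how statistics of this shape could be recomputed for a string column, assuming pandas and NumPy. The 10-bin `np.histogram` call and the sample standard deviation (`ddof=1`) are assumptions, not the pipeline that produced the figures in the table.

```python
# Minimal sketch (assumed approach, not the original pipeline): recompute
# string-length statistics of the same shape for one column of a DataFrame.
import numpy as np
import pandas as pd

def string_length_stats(series: pd.Series) -> dict:
    # Character lengths of the non-null values in the column.
    lengths = series.dropna().astype(str).str.len().to_numpy()
    # Assumption: 10 equal-width bins; the table's exact binning is unspecified.
    hist, bin_edges = np.histogram(lengths, bins=10)
    return {
        "num_examples": int(lengths.size),
        "min": int(lengths.min()),
        "max": int(lengths.max()),
        "mean": round(float(lengths.mean()), 5),
        "median": float(np.median(lengths)),
        "std": round(float(lengths.std(ddof=1)), 5),  # sample std is an assumption
        "histogram": {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()},
    }

# Toy usage with made-up data (not taken from the table):
df = pd.DataFrame({"text": ["yes", "no", "maybe", "definitely not"]})
print(string_length_stats(df["text"]))
```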
Per-column statistics (one row per dataset / config / split / column):

| dataset | config | split | num_examples | column_name | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|
| canristiian/drug_rule_sort2 | default | train | 4,818 | output | 2 | 16 | 3.67248 | 4 | 1.82372 | {"bin_edges": [2, 4, 6, 8, 10, 12, 14, 16, 16], "hist": [1989, 2333, 328, 93, 49, 24, 0, 2]} | false |
| Teja2022/samlicense | default | train | 9,846 | text | 32 | 14,491 | 1,447.92129 | 1,190 | 1,169.27134 | {"bin_edges": [32, 1478, 2924, 4370, 5816, 7262, 8708, 10154, 11600, 13046, 14491], "hist": [6050, 2819, 721, 186, 39, 18, 6, 4, 1, 2]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| vidhikatkoria/SGD_Banks | default | train | 4,400 | context | 24 | 418 | 153.73909 | 150 | 50.35455 | {"bin_edges": [24, 64, 104, 144, 184, 224, 264, 304, 344, 384, 418], "hist": [190, 330, 1383, 1438, 718, 222, 78, 33, 7, 1]} | false |
| vidhikatkoria/SGD_Banks | default | train | 4,400 | response | 2 | 191 | 40.825 | 36 | 24.26604 | {"bin_edges": [2, 21, 40, 59, 78, 97, 116, 135, 154, 173, 191], "hist": [805, 1665, 1223, 349, 166, 129, 42, 16, 2, 3]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | edan_url | 30 | 31 | 30.787 | 31 | 0.40963 | {"bin_edges": [30, 31, 31], "hist": [213, 787]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | guid | 63 | 63 | 63 | 63 | 0 | {"bin_edges": [63, 63], "hist": [1000]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | id | 45 | 51 | 45.294 | 45 | 1.29586 | {"bin_edges": [45, 46, 47, 48, 49, 50, 51, 51], "hist": [951, 0, 0, 0, 0, 0, 49]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | image_alt | 35 | 90 | 45.108 | 44 | 4.70537 | {"bin_edges": [35, 41, 47, 53, 59, 65, 71, 77, 83, 89, 90], "hist": [80, 663, 198, 38, 12, 6, 1, 1, 0, 1]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | image_hash | 32 | 32 | 32 | 32 | 0 | {"bin_edges": [32, 32], "hist": [1000]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | image_url | 87 | 93 | 87.294 | 87 | 1.29586 | {"bin_edges": [87, 88, 89, 90, 91, 92, 93, 93], "hist": [951, 0, 0, 0, 0, 0, 49]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | locality | 4 | 130 | 42.67766 | 39 | 20.76068 | {"bin_edges": [4, 17, 30, 43, 56, 69, 82, 95, 108, 121, 130], "hist": [21, 59, 76, 43, 43, 22, 5, 3, 0, 1]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | name | 7 | 62 | 17.108 | 16 | 4.70537 | {"bin_edges": [7, 13, 19, 25, 31, 37, 43, 49, 55, 61, 62], "hist": [80, 663, 198, 38, 12, 6, 1, 1, 0, 1]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | scientific_name | 9 | 61 | 20.171 | 17 | 8.68546 | {"bin_edges": [9, 15, 21, 27, 33, 39, 45, 51, 57, 61], "hist": [234, 499, 56, 108, 55, 23, 12, 11, 2]} | false |
| gegre/ddpm-butterflies-128 | default | train | 1,000 | usnm_no | 7 | 24 | 14.84388 | 15 | 3.4149 | {"bin_edges": [7, 9, 11, 13, 15, 17, 19, 21, 23, 24], "hist": [68, 0, 27, 0, 552, 0, 0, 36, 28]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| vsarathy/nl-robotics-semantic-parsing-info_structure-30k-no-context | default | train | 22,330 | input | 1 | 122 | 45.35481 | 41 | 16.90815 | {"bin_edges": [1, 14, 27, 40, 53, 66, 79, 92, 105, 118, 122], "hist": [67, 1734, 8298, 5918, 3046, 2155, 882, 193, 33, 4]} | false |
| vsarathy/nl-robotics-semantic-parsing-info_structure-30k-no-context | default | train | 22,330 | text | 603 | 765 | 680.43426 | 681 | 25.43456 | {"bin_edges": [603, 620, 637, 654, 671, 688, 705, 722, 739, 756, 765], "hist": [485, 1364, 765, 3668, 7609, 4924, 2477, 900, 127, 11]} | false |
| vsarathy/nl-robotics-semantic-parsing-info_structure-30k-no-context | default | train | 22,330 | utterance | 1 | 122 | 45.35481 | 41 | 16.90815 | {"bin_edges": [1, 14, 27, 40, 53, 66, 79, 92, 105, 118, 122], "hist": [67, 1734, 8298, 5918, 3046, 2155, 882, 193, 33, 4]} | false |
| ThingsSolver/nsql-eng | default | train | 261,423 | answer | 2 | 4,435 | 143.21088 | 82 | 166.65518 | {"bin_edges": [2, 446, 890, 1334, 1778, 2222, 2666, 3110, 3554, 3998, 4435], "hist": [246987, 11496, 2698, 124, 47, 47, 12, 9, 2, 1]} | false |
| ThingsSolver/nsql-eng | default | train | 261,423 | context | 36 | 7,139 | 800.53032 | 170 | 1,304.04959 | {"bin_edges": [36, 747, 1458, 2169, 2880, 3591, 4302, 5013, 5724, 6435, 7139], "hist": [186885, 31528, 12112, 10864, 4514, 5261, 0, 10051, 0, 208]} | false |
| ThingsSolver/nsql-eng | default | train | 261,423 | prompt | 297 | 7,603 | 1,109.0048 | 471 | 1,309.10709 | {"bin_edges": [297, 1028, 1759, 2490, 3221, 3952, 4683, 5414, 6145, 6876, 7603], "hist": [186740, 31504, 12269, 10828, 4640, 5183, 0, 10011, 40, 208]} | false |
| ThingsSolver/nsql-eng | default | train | 261,423 | question | 1 | 1,058 | 74.47445 | 65 | 38.15466 | {"bin_edges": [1, 107, 213, 319, 425, 531, 637, 743, 849, 955, 1058], "hist": [222361, 36314, 2631, 77, 24, 5, 4, 4, 0, 3]} | false |
| ThingsSolver/nsql-eng | default | train | 261,423 | text | 318 | 8,512 | 1,252.21568 | 556 | 1,416.54357 | {"bin_edges": [318, 1138, 1958, 2778, 3598, 4418, 5238, 6058, 6878, 7698, 8512], "hist": [186623, 31936, 8152, 11514, 8567, 3886, 5769, 4741, 210, 25]} | false |
| ethansimrm/train_tfidf_words_and_weights | default | train | 174,470 | text | 18 | 78 | 28.43327 | 28 | 4.02966 | {"bin_edges": [18, 25, 32, 39, 46, 53, 60, 67, 74, 78], "hist": [29305, 108637, 33902, 2519, 102, 4, 0, 0, 1]} | false |
| mrSoul7766/ECTSum | default | test | 495 | summary | 22 | 1,366 | 264.95152 | 215 | 208.17422 | {"bin_edges": [22, 157, 292, 427, 562, 697, 832, 967, 1102, 1237, 1366], "hist": [182, 144, 80, 48, 22, 7, 6, 2, 2, 2]} | false |
| mrSoul7766/ECTSum | default | test | 495 | text | 2,776 | 48,445 | 17,584.17576 | 17,035 | 6,740.5318 | {"bin_edges": [2776, 7343, 11910, 16477, 21044, 25611, 30178, 34745, 39312, 43879, 48445], "hist": [16, 75, 137, 133, 81, 36, 9, 2, 3, 3]} | false |
| mrSoul7766/ECTSum | default | train | 1,681 | summary | 16 | 1,675 | 274.35158 | 220 | 215.77106 | {"bin_edges": [16, 182, 348, 514, 680, 846, 1012, 1178, 1344, 1510, 1675], "hist": [692, 531, 265, 106, 41, 23, 14, 5, 3, 1]} | false |
| mrSoul7766/ECTSum | default | train | 1,681 | text | 1,540 | 70,127 | 17,862.04759 | 17,346 | 6,678.44883 | {"bin_edges": [1540, 8399, 15258, 22117, 28976, 35835, 42694, 49553, 56412, 63271, 70127], "hist": [101, 539, 633, 316, 77, 10, 4, 0, 0, 1]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| kursathalat/rating_ds | default | train | 20 | sentence1 | 36 | 135 | 89.9 | 98 | 31.38203 | {"bin_edges": [36, 46, 56, 66, 76, 86, 96, 106, 116, 126, 135], "hist": [1, 2, 3, 2, 1, 0, 3, 2, 3, 3]} | false |
| kursathalat/rating_ds | default | train | 20 | sentence2 | 36 | 135 | 89.9 | 98 | 31.38203 | {"bin_edges": [36, 46, 56, 66, 76, 86, 96, 106, 116, 126, 135], "hist": [1, 2, 3, 2, 1, 0, 3, 2, 3, 3]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| wesley7137/dataset_alpaca_counseling | default | train | 4,565 | text | 52 | 32,885 | 1,124.9805 | 930 | 993.42086 | {"bin_edges": [52, 3336, 6620, 9904, 13188, 16472, 19756, 23040, 26324, 29608, 32885], "hist": [4475, 88, 0, 0, 0, 0, 0, 0, 0, 2]} | false |
| DataLinguistic/Data_OpenSet | default | train | 122,606 | text | 72 | 16,401 | 1,141.96392 | 882 | 857.6068 | {"bin_edges": [72, 1705, 3338, 4971, 6604, 8237, 9870, 11503, 13136, 14769, 16401], "hist": [101601, 17609, 2768, 451, 119, 34, 7, 9, 5, 3]} | false |
| tilyupo/mmlu_qa2a | default | train | 110,421 | answer | 1 | 224 | 39.52777 | 38 | 19.61983 | {"bin_edges": [1, 24, 47, 70, 93, 116, 139, 162, 185, 208, 224], "hist": [25604, 47978, 28989, 6703, 877, 213, 39, 12, 3, 3]} | false |
| tilyupo/mmlu_qa2a | default | train | 110,421 | distraction | 1 | 186 | 37.46024 | 36 | 18.54637 | {"bin_edges": [1, 20, 39, 58, 77, 96, 115, 134, 153, 172, 186], "hist": [19691, 41604, 33811, 12178, 2536, 444, 106, 40, 8, 3]} | false |
| tilyupo/mmlu_qa2a | default | train | 110,421 | question | 17 | 262 | 54.0326 | 51 | 21.24573 | {"bin_edges": [17, 42, 67, 92, 117, 142, 167, 192, 217, 242, 262], "hist": [34520, 52158, 18560, 3512, 891, 427, 215, 96, 39, 3]} | false |
| tilyupo/mmlu_qa2a | default | validation | 13,036 | answer | 1 | 207 | 50.93564 | 40 | 38.88178 | {"bin_edges": [1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 207], "hist": [4097, 2661, 2056, 1677, 1147, 756, 342, 189, 75, 36]} | false |
| tilyupo/mmlu_qa2a | default | validation | 13,036 | distraction | 1 | 214 | 48.09244 | 40 | 34.69886 | {"bin_edges": [1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 214], "hist": [4181, 2866, 2329, 1768, 1092, 538, 200, 52, 8, 2]} | false |
| tilyupo/mmlu_qa2a | default | validation | 13,036 | question | 17 | 253 | 87.37719 | 80 | 38.38669 | {"bin_edges": [17, 41, 65, 89, 113, 137, 161, 185, 209, 233, 253], "hist": [897, 3108, 3631, 2524, 1373, 770, 459, 187, 72, 15]} | false |
| cha7ura/food-data-prompts | default | train | 10,000 | text | 196 | 2,763 | 322.5101 | 303 | 110.63264 | {"bin_edges": [196, 453, 710, 967, 1224, 1481, 1738, 1995, 2252, 2509, 2763], "hist": [9431, 495, 33, 21, 8, 3, 1, 2, 4, 2]} | false |
| cha7ura/food-data-prompts | default | test | 1,000 | text | 195 | 1,338 | 311.675 | 291.5 | 90.72962 | {"bin_edges": [195, 310, 425, 540, 655, 770, 885, 1000, 1115, 1230, 1338], "hist": [594, 326, 70, 2, 3, 1, 0, 2, 0, 2]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| liuyanchen1015/MULTI_VALUE_sst2_what_comparative | default | train | 108 | sentence | 29 | 215 | 100.62037 | 90 | 47.4668 | {"bin_edges": [29, 48, 67, 86, 105, 124, 143, 162, 181, 200, 215], "hist": [13, 19, 16, 15, 15, 7, 5, 10, 6, 2]} | false |
| paoloitaliani/news_articles | corriere_autunno | train | 90 | author | 17 | 51 | 28.95349 | 28 | 5.58129 | {"bin_edges": [17, 21, 25, 29, 33, 37, 41, 45, 49, 51], "hist": [5, 3, 43, 24, 5, 2, 1, 1, 2]} | false |
| paoloitaliani/news_articles | corriere_autunno | train | 90 | body | 1,818 | 7,444 | 3,543.12222 | 3,402.5 | 1,157.05076 | {"bin_edges": [1818, 2381, 2944, 3507, 4070, 4633, 5196, 5759, 6322, 6885, 7444], "hist": [11, 18, 21, 20, 5, 5, 6, 2, 0, 2]} | false |
| paoloitaliani/news_articles | corriere_autunno | train | 90 | date | 14 | 16 | 15.35556 | 15 | 0.52598 | {"bin_edges": [14, 15, 16, 16], "hist": [2, 54, 34]} | false |
| paoloitaliani/news_articles | corriere_autunno | train | 90 | journal | 114 | 134 | 123.86667 | 125 | 4.39918 | {"bin_edges": [114, 117, 120, 123, 126, 129, 132, 134], "hist": [14, 0, 5, 32, 32, 6, 1]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| luistakahashi/ts-classifier-pear-5 | default | train | 2,399 | title | 5 | 62 | 26.07461 | 26 | 6.4541 | {"bin_edges": [5, 11, 17, 23, 29, 35, 41, 47, 53, 59, 62], "hist": [29, 136, 431, 1070, 552, 123, 41, 12, 4, 1]} | false |
| paoloitaliani/news_articles | corriere_primavera | train | 105 | author | 17 | 50 | 29.07609 | 28.5 | 5.02354 | {"bin_edges": [17, 21, 25, 29, 33, 37, 41, 45, 49, 50], "hist": [5, 2, 39, 33, 8, 2, 1, 0, 2]} | false |
| paoloitaliani/news_articles | corriere_primavera | train | 105 | body | 82 | 7,413 | 3,039.79592 | 3,059 | 1,517.42618 | {"bin_edges": [82, 816, 1550, 2284, 3018, 3752, 4486, 5220, 5954, 6688, 7413], "hist": [7, 10, 9, 20, 26, 15, 4, 1, 3, 3]} | false |
| paoloitaliani/news_articles | corriere_primavera | train | 105 | journal | 114 | 129 | 124.4381 | 126 | 3.78744 | {"bin_edges": [114, 116, 118, 120, 122, 124, 126, 128, 129], "hist": [11, 0, 0, 2, 14, 20, 52, 6]} | false |
| Giacinta/label | default | train | 636 | cleaned_text | 20 | 9,863 | 2,505.27044 | 1,413 | 2,606.9184 | {"bin_edges": [20, 1005, 1990, 2975, 3960, 4945, 5930, 6915, 7900, 8885, 9863], "hist": [244, 140, 71, 48, 21, 22, 23, 19, 18, 30]} | false |
| Giacinta/label | default | train | 636 | id | 7 | 7 | 7 | 7 | 0 | {"bin_edges": [7, 7], "hist": [636]} | false |
| Giacinta/label | default | train | 636 | post | 20 | 10,031 | 2,559.04403 | 1,432 | 2,685.73188 | {"bin_edges": [20, 1022, 2024, 3026, 4028, 5030, 6032, 7034, 8036, 9038, 10031], "hist": [246, 138, 70, 48, 22, 24, 21, 18, 14, 35]} | false |
| Giacinta/label | default | train | 636 | translation | 11 | 6,541 | 955.51101 | 470.5 | 1,179.33305 | {"bin_edges": [11, 665, 1319, 1973, 2627, 3281, 3935, 4589, 5243, 5897, 6541], "hist": [384, 118, 41, 31, 28, 4, 6, 15, 7, 2]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| paoloitaliani/news_articles | fattoq_primavera | train | 152 | author | 22 | 35 | 27.01974 | 26 | 3.07408 | {"bin_edges": [22, 24, 26, 28, 30, 32, 34, 35], "hist": [2, 55, 34, 25, 16, 17, 3]} | false |
| paoloitaliani/news_articles | fattoq_primavera | train | 152 | body | 32 | 7,637 | 3,240.52703 | 3,151 | 1,557.04225 | {"bin_edges": [32, 793, 1554, 2315, 3076, 3837, 4598, 5359, 6120, 6881, 7637], "hist": [3, 20, 15, 32, 43, 11, 5, 9, 7, 3]} | false |
| paoloitaliani/news_articles | fattoq_autunno | train | 133 | author | 22 | 50 | 27.78195 | 27 | 3.7986 | {"bin_edges": [22, 25, 28, 31, 34, 37, 40, 43, 46, 49, 50], "hist": [38, 32, 29, 29, 3, 1, 0, 0, 0, 1]} | false |
| paoloitaliani/news_articles | fattoq_autunno | train | 133 | body | 1,316 | 8,053 | 3,697.41353 | 3,410 | 1,408.36695 | {"bin_edges": [1316, 1990, 2664, 3338, 4012, 4686, 5360, 6034, 6708, 7382, 8053], "hist": [11, 19, 27, 37, 13, 6, 9, 4, 6, 1]} | false |
| juanberasategui/Master_Thesis_Data | default | train | 1,440,042 | DATE | 10 | 10 | 10 | 10 | 0 | {"bin_edges": [10, 10], "hist": [1440042]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| RikoteMaster/goemotion_4_llama2 | default | train | 72,648 | Text_processed | 2 | 231 | 68.52717 | 66 | 35.6146 | {"bin_edges": [2, 25, 48, 71, 94, 117, 140, 163, 186, 209, 231], "hist": [8808, 14938, 15376, 14238, 11466, 6238, 1440, 142, 0, 2]} | false |
| RikoteMaster/goemotion_4_llama2 | default | train | 72,648 | text | 203 | 432 | 271.66358 | 269 | 35.71799 | {"bin_edges": [203, 226, 249, 272, 295, 318, 341, 364, 387, 410, 432], "hist": [7502, 14842, 15442, 14268, 11882, 6738, 1778, 186, 8, 2]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| FreedomIntelligence/sharegpt-french | default | train | 5,617 | id | 1 | 5 | 4.84422 | 5 | 0.42212 | {"bin_edges": [1, 2, 3, 4, 5, 5], "hist": [5, 6, 83, 671, 4852]} | false |
| paoloitaliani/news_articles | ukraine | train | 27,449 | author | 1 | 342 | 17.87301 | 15 | 9.41193 | {"bin_edges": [1, 36, 71, 106, 141, 176, 211, 246, 281, 316, 342], "hist": [10284, 456, 14, 1, 1, 0, 0, 0, 0, 1]} | false |
| paoloitaliani/news_articles | ukraine | train | 27,449 | body | 15 | 67,283 | 2,902.20916 | 2,722 | 2,043.02828 | {"bin_edges": [15, 6742, 13469, 20196, 26923, 33650, 40377, 47104, 53831, 60558, 67283], "hist": [26542, 818, 58, 11, 4, 3, 0, 1, 0, 1]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | {"bin_edges": [17, 18, 19, 20, 20], "hist": [21, 167, 1563, 1519]} | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=1, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | {"bin_edges": [5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499], "hist": [2204, 476, 542, 9, 4, 4, 5, 5, 13, 8]} | false |
| kyleeasterly/purple-aerospace-mix-v2-300-12 | default | train | 3,900 | text | 314 | 13,893 | 4,162.80513 | 3,800 | 1,910.72178 | {"bin_edges": [314, 1672, 3030, 4388, 5746, 7104, 8462, 9820, 11178, 12536, 13893], "hist": [144, 1020, 1305, 781, 353, 153, 59, 70, 11, 4]} | false |
| Svetlana0303/all_samples_Regression | default | validation | 37 | Текст | 6 | 247 | 65.05405 | 42 | 51.17234 | {"bin_edges": [6, 31, 56, 81, 106, 131, 156, 181, 206, 231, 247], "hist": [7, 15, 4, 5, 3, 1, 0, 1, 0, 1]} | false |
| Svetlana0303/all_samples_Regression | default | validation | 37 | виды речевых актов | 9 | 32 | 14.42424 | 11 | 6.36411 | {"bin_edges": [9, 12, 15, 18, 21, 24, 27, 30, 32], "hist": [17, 3, 5, 1, 4, 2, 0, 1]} | false |
| Svetlana0303/all_samples_Regression | default | validation | 37 | исправление | 1 | 5 | 2.05405 | 1 | 1.76298 | {"bin_edges": [1, 2, 3, 4, 5, 5], "hist": [27, 0, 0, 1, 9]} | false |
| Svetlana0303/all_samples_Regression | default | validation | 37 | категории эмпатии/антипатии | 2 | 17 | 5.75676 | 3 | 4.31023 | {"bin_edges": [2, 4, 6, 8, 10, 12, 14, 16, 17], "hist": [20, 2, 4, 4, 3, 2, 0, 2]} | false |
| Svetlana0303/all_samples_Regression | default | validation | 37 | текст с примером ошибки | 1 | 5 | 2.78378 | 1 | 1.97393 | {"bin_edges": [1, 2, 3, 4, 5, 5], "hist": [20, 0, 0, 2, 15]} | false |
| Svetlana0303/all_samples_Regression | default | train | 130 | Текст | 3 | 352 | 90.31538 | 71 | 66.81933 | {"bin_edges": [3, 38, 73, 108, 143, 178, 213, 248, 283, 318, 352], "hist": [26, 40, 22, 20, 10, 3, 4, 3, 0, 2]} | false |
| Svetlana0303/all_samples_Regression | default | train | 130 | виды речевых актов | 1 | 36 | 12.18898 | 11 | 5.6227 | {"bin_edges": [1, 5, 9, 13, 17, 21, 25, 29, 33, 36], "hist": [4, 0, 88, 16, 7, 6, 4, 0, 2]} | false |
| Svetlana0303/all_samples_Regression | default | train | 130 | категории эмпатии/антипатии | 1 | 24 | 7.176 | 6 | 4.80434 | {"bin_edges": [1, 4, 7, 10, 13, 16, 19, 22, 24], "hist": [39, 24, 28, 16, 10, 5, 2, 1]} | false |
| Svetlana0303/all_samples_Regression | default | train | 130 | обоснование (слово, фраза, предложение) | 1 | 47 | 12.21649 | 9 | 10.16963 | {"bin_edges": [1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 47], "hist": [28, 29, 14, 6, 8, 5, 3, 2, 1, 1]} | false |
| Svetlana0303/all_samples_Regression | default | test | 19 | Текст | 7 | 167 | 65.05263 | 55 | 45.84088 | {"bin_edges": [7, 24, 41, 58, 75, 92, 109, 126, 143, 160, 167], "hist": [3, 4, 3, 2, 3, 1, 1, 0, 1, 1]} | false |
| Svetlana0303/all_samples_Regression | default | test | 19 | виды речевых актов | 6 | 47 | 15.52632 | 11 | 10.08502 | {"bin_edges": [6, 11, 16, 21, 26, 31, 36, 41, 46, 47], "hist": [9, 4, 0, 4, 1, 0, 0, 0, 1]} | false |