| dataset (string) | config (string) | split (string) | num_examples (int64) | column_name (string) | min (int64) | max (int64) | mean (float64) | median (float64) | std (float64) | histogram (dict) | partial (bool) |
|---|---|---|---|---|---|---|---|---|---|---|---|
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | Numbers | 3 | 59 | 8.68286 | 8 | 4.83162 | { "bin_edges": [ 3, 9, 15, 21, 27, 33, 39, 45, 51, 57, 59 ], "hist": [ 2247, 1561, 111, 24, 2, 13, 4, 0, 2, 9 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | Ques | 12 | 166 | 43.92497 | 40 | 15.02272 | { "bin_edges": [ 12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 166 ], "hist": [ 226, 2142, 1099, 321, 135, 28, 15, 3, 2, 1 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | Question | 30 | 421 | 160.94815 | 156 | 49.05976 | { "bin_edges": [ 30, 70, 110, 150, 190, 230, 270, 310, 350, 390, 421 ], "hist": [ 16, 544, 1242, 1220, 629, 218, 57, 26, 14, 7 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | equation | 1 | 67 | 13.43745 | 12 | 6.15758 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 270, 2421, 1014, 196, 33, 22, 2, 2, 1, 12 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | group_nums | 18 | 94 | 42.31941 | 42 | 10.31927 | { "bin_edges": [ 18, 26, 34, 42, 50, 58, 66, 74, 82, 90, 94 ], "hist": [ 50, 1048, 687, 1411, 463, 190, 93, 18, 12, 1 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | question | 5 | 370 | 105.77322 | 99 | 45.35996 | { "bin_edges": [ 5, 42, 79, 116, 153, 190, 227, 264, 301, 338, 370 ], "hist": [ 114, 1085, 1361, 880, 360, 101, 38, 16, 14, 4 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | wrong_equation | 1 | 67 | 13.43745 | 12 | 6.15758 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 270, 2421, 1014, 196, 33, 22, 2, 2, 1, 12 ] } | false |
| keirp/hungarian_national_hs_finals_exam | default | test | 33 | Question | 63 | 1,232 | 280.42424 | 247 | 219.16105 | { "bin_edges": [ 63, 180, 297, 414, 531, 648, 765, 882, 999, 1116, 1232 ], "hist": [ 13, 9, 4, 6, 0, 0, 0, 0, 0, 1 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | Body | 21 | 263 | 112.90667 | 107 | 39.69796 | { "bin_edges": [ 21, 46, 71, 96, 121, 146, 171, 196, 221, 246, 263 ], "hist": [ 9, 61, 155, 162, 96, 67, 19, 23, 7, 1 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | CoT_example | 124 | 355 | 212.76333 | 207.5 | 44.62787 | { "bin_edges": [ 124, 148, 172, 196, 220, 244, 268, 292, 316, 340, 355 ], "hist": [ 27, 75, 144, 115, 101, 70, 35, 13, 14, 6 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | Equation | 3 | 30 | 16.875 | 15 | 4.54281 | { "bin_edges": [ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 30 ], "hist": [ 1, 0, 0, 217, 251, 1, 33, 76, 20, 1 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | ID | 6 | 8 | 7.9 | 8 | 0.31649 | { "bin_edges": [ 6, 7, 8, 8 ], "hist": [ 3, 54, 543 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | Question | 22 | 90 | 48.085 | 46 | 15.1133 | { "bin_edges": [ 22, 29, 36, 43, 50, 57, 64, 71, 78, 85, 90 ], "hist": [ 41, 89, 119, 119, 73, 58, 28, 44, 20, 9 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | answer | 3 | 9 | 3.89667 | 4 | 0.95787 | { "bin_edges": [ 3, 4, 5, 6, 7, 8, 9, 9 ], "hist": [ 229, 259, 76, 22, 10, 3, 1 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | question | 76 | 304 | 161.99167 | 154.5 | 43.91928 | { "bin_edges": [ 76, 99, 122, 145, 168, 191, 214, 237, 260, 283, 304 ], "hist": [ 28, 73, 140, 110, 110, 64, 43, 11, 7, 14 ] } | false |
| alexmaraval/svamp_optimize_examples | default | train | 600 | rationale | 3 | 30 | 16.875 | 15 | 4.54281 | { "bin_edges": [ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 30 ], "hist": [ 1, 0, 0, 217, 251, 1, 33, 76, 20, 1 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | Body | 26 | 255 | 137.81212 | 129 | 52.16807 | { "bin_edges": [ 26, 49, 72, 95, 118, 141, 164, 187, 210, 233, 255 ], "hist": [ 2, 12, 11, 52, 17, 21, 22, 8, 9, 11 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | Equation | 17 | 27 | 19.84848 | 17 | 4.52716 | { "bin_edges": [ 17, 19, 21, 23, 25, 27, 27 ], "hist": [ 118, 0, 0, 0, 0, 47 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | Numbers | 3 | 15 | 6.93939 | 6 | 3.02972 | { "bin_edges": [ 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 37, 51, 35, 19, 9, 12, 2 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | Ques | 26 | 100 | 50.73939 | 49 | 15.56297 | { "bin_edges": [ 26, 34, 42, 50, 58, 66, 74, 82, 90, 98, 100 ], "hist": [ 20, 33, 36, 21, 30, 9, 7, 5, 3, 1 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | Question | 87 | 347 | 189.55152 | 182 | 58.97461 | { "bin_edges": [ 87, 114, 141, 168, 195, 222, 249, 276, 303, 330, 347 ], "hist": [ 12, 18, 44, 26, 17, 25, 4, 13, 3, 3 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | equation | 7 | 19 | 10.64848 | 9 | 3.23058 | { "bin_edges": [ 7, 9, 11, 13, 15, 17, 19, 19 ], "hist": [ 52, 51, 15, 19, 18, 7, 3 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | group_nums | 27 | 85 | 50.75152 | 48 | 12.35225 | { "bin_edges": [ 27, 33, 39, 45, 51, 57, 63, 69, 75, 81, 85 ], "hist": [ 7, 13, 41, 25, 34, 13, 18, 8, 2, 4 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | question | 22 | 242 | 124.15758 | 120 | 51.27068 | { "bin_edges": [ 22, 45, 68, 91, 114, 137, 160, 183, 206, 229, 242 ], "hist": [ 5, 14, 40, 22, 27, 12, 19, 12, 8, 6 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | validation | 165 | wrong_equation | 7 | 19 | 10.64848 | 9 | 3.23058 | { "bin_edges": [ 7, 9, 11, 13, 15, 17, 19, 19 ], "hist": [ 52, 51, 15, 19, 18, 7, 3 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18287 | 6 | 60.99544 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | country | 4 | 41 | 10.03968 | 8 | 6.56985 | { "bin_edges": [ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 41 ], "hist": [ 178, 117, 26, 22, 11, 11, 0, 10, 2, 1 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | country_code | 1 | 3 | 2.99471 | 3 | 0.10287 | { "bin_edges": [ 1, 2, 3, 3 ], "hist": [ 1, 0, 377 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | date | 13 | 19 | 18.9709 | 19 | 0.40119 | { "bin_edges": [ 13, 14, 15, 16, 17, 18, 19, 19 ], "hist": [ 1, 1, 0, 0, 0, 0, 376 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | document_path | 97 | 97 | 97 | 97 | 0 | { "bin_edges": [ 97, 97 ], "hist": [ 378 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | para_list | 2 | 1,106,088 | 130,986.16931 | 67,428 | 161,991.42026 | { "bin_edges": [ 2, 110611, 221220, 331829, 442438, 553047, 663656, 774265, 884874, 995483, 1106088 ], "hist": [ 240, 66, 33, 17, 10, 4, 4, 2, 0, 2 ] } | false |
| mtyrrell/NDC_documents_master | default | train | 378 | url | 53 | 197 | 89.66402 | 86 | 20.50893 | { "bin_edges": [ 53, 68, 83, 98, 113, 128, 143, 158, 173, 188, 197 ], "hist": [ 40, 119, 104, 74, 24, 10, 2, 3, 1, 1 ] } | false |
| nus-yam/ex-repair | recoder-corpus-in | train | 297,029 | ground_truth | 9 | 284,321 | 1,247.03437 | 642 | 2,641.74017 | { "bin_edges": [ 9, 28441, 56873, 85305, 113737, 142169, 170601, 199033, 227465, 255897, 284321 ], "hist": [ 296747, 197, 65, 8, 5, 0, 3, 0, 0, 4 ] } | false |
| nus-yam/ex-repair | recoder-corpus-in | train | 297,029 | source | 9 | 285,748 | 1,207.96865 | 608 | 2,626.9344 | { "bin_edges": [ 9, 28583, 57157, 85731, 114305, 142879, 171453, 200027, 228601, 257175, 285748 ], "hist": [ 296751, 194, 64, 9, 4, 1, 2, 0, 0, 4 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| Sachin9474/llama_model_dataset | default | train | 54 | text | 94 | 175 | 123.90741 | 113.5 | 23.23731 | { "bin_edges": [ 94, 103, 112, 121, 130, 139, 148, 157, 166, 175, 175 ], "hist": [ 8, 14, 9, 7, 2, 3, 4, 1, 5, 1 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18287 | 6 | 60.99544 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| KevinNotSmile/nuscenes-qa-mini | day | validation | 1,534 | question | 19 | 166 | 70.21904 | 68 | 30.41977 | { "bin_edges": [ 19, 34, 49, 64, 79, 94, 109, 124, 139, 154, 166 ], "hist": [ 236, 177, 253, 287, 212, 201, 97, 37, 25, 9 ] } | true |
| KevinNotSmile/nuscenes-qa-mini | day | validation | 1,534 | token | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 1534 ] } | true |
| theethdev/Userintent-sendswap | default | train | 100 | Input | 19 | 43 | 29.27 | 29.5 | 6.46335 | { "bin_edges": [ 19, 22, 25, 28, 31, 34, 37, 40, 43, 43 ], "hist": [ 13, 16, 15, 9, 23, 9, 8, 5, 2 ] } | false |
| theethdev/Userintent-sendswap | default | train | 100 | text | 52 | 76 | 62.27 | 62.5 | 6.46335 | { "bin_edges": [ 52, 55, 58, 61, 64, 67, 70, 73, 76, 76 ], "hist": [ 13, 16, 15, 9, 23, 9, 8, 5, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| sarahpann/GSM8K_Test | default | test | 200 | answers | 64 | 847 | 274.68 | 242.5 | 138.60438 | { "bin_edges": [ 64, 143, 222, 301, 380, 459, 538, 617, 696, 775, 847 ], "hist": [ 24, 61, 45, 38, 10, 10, 7, 2, 1, 2 ] } | false |
| sarahpann/GSM8K_Test | default | test | 200 | prompt | 99 | 583 | 262.355 | 235.5 | 100.23655 | { "bin_edges": [ 99, 148, 197, 246, 295, 344, 393, 442, 491, 540, 583 ], "hist": [ 15, 46, 44, 29, 29, 15, 8, 6, 3, 5 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=10000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| themanas021/chasat-algebra-sub02 | default | train | 871 | Question | 43 | 1,034 | 349.55339 | 324 | 189.39564 | { "bin_edges": [ 43, 143, 243, 343, 443, 543, 643, 743, 843, 943, 1034 ], "hist": [ 130, 179, 154, 138, 126, 76, 43, 19, 4, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| igiag/Lalka | default | train | 10,693 | text | 85 | 3,305 | 753.92219 | 652 | 422.69912 | { "bin_edges": [ 85, 408, 731, 1054, 1377, 1700, 2023, 2346, 2669, 2992, 3305 ], "hist": [ 2078, 4129, 2372, 1186, 556, 225, 83, 37, 18, 9 ] } | false |
| AISE-TUDelft/PY150k | default | dev | 5,000 | full_line | 6 | 205,002 | 5,406.846 | 2,019.5 | 10,962.1764 | { "bin_edges": [ 6, 20506, 41006, 61506, 82006, 102506, 123006, 143506, 164006, 184506, 205002 ], "hist": [ 4743, 171, 47, 25, 5, 3, 2, 3, 0, 1 ] } | false |
| AISE-TUDelft/PY150k | default | dev | 5,000 | gt | 3 | 2,438 | 38.882 | 34 | 41.61577 | { "bin_edges": [ 3, 247, 491, 735, 979, 1223, 1467, 1711, 1955, 2199, 2438 ], "hist": [ 4999, 0, 0, 0, 0, 0, 0, 0, 0, 1 ] } | false |
| AISE-TUDelft/PY150k | default | dev | 5,000 | input | 0 | 139,327 | 2,769.8888 | 747 | 7,183.74593 | { "bin_edges": [ 0, 13933, 27866, 41799, 55732, 69665, 83598, 97531, 111464, 125397, 139327 ], "hist": [ 4806, 133, 27, 16, 5, 6, 4, 0, 1, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=100, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18287 | 6 | 60.99544 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| metaeval/monotonicity-entailment | default | train | 5,382 | sentence1 | 11 | 233 | 39.61167 | 36 | 16.89186 | { "bin_edges": [ 11, 34, 57, 80, 103, 126, 149, 172, 195, 218, 233 ], "hist": [ 2231, 2603, 408, 86, 33, 10, 1, 2, 4, 4 ] } | false |
| metaeval/monotonicity-entailment | default | train | 5,382 | sentence2 | 10 | 233 | 41.31512 | 38 | 17.56578 | { "bin_edges": [ 10, 33, 56, 79, 102, 125, 148, 171, 194, 217, 233 ], "hist": [ 1768, 2829, 611, 116, 32, 15, 1, 2, 4, 4 ] } | false |
| AISE-TUDelft/PY150k | default | train | 95,000 | full_line | 6 | 255,897 | 4,638.87414 | 1,721 | 9,763.44783 | { "bin_edges": [ 6, 25596, 51186, 76776, 102366, 127956, 153546, 179136, 204726, 230316, 255897 ], "hist": [ 92200, 2049, 480, 134, 68, 26, 21, 13, 5, 4 ] } | false |
| AISE-TUDelft/PY150k | default | train | 95,000 | gt | 3 | 39,493 | 39.96827 | 33 | 188.98829 | { "bin_edges": [ 3, 3953, 7903, 11853, 15803, 19753, 23703, 27653, 31603, 35553, 39493 ], "hist": [ 94995, 1, 1, 0, 1, 0, 0, 0, 0, 2 ] } | false |
| AISE-TUDelft/PY150k | default | train | 95,000 | input | 0 | 247,873 | 2,271.37997 | 631 | 5,880.47728 | { "bin_edges": [ 0, 24788, 49576, 74364, 99152, 123940, 148728, 173516, 198304, 223092, 247873 ], "hist": [ 93943, 815, 169, 40, 14, 7, 7, 3, 0, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| edwright/yearling-sales | default | train | 1,746 | buyer | 7 | 117 | 27.12866 | 22 | 15.92558 | { "bin_edges": [ 7, 19, 31, 43, 55, 67, 79, 91, 103, 115, 117 ], "hist": [ 508, 593, 224, 111, 96, 26, 4, 5, 2, 1 ] } | false |
| edwright/yearling-sales | default | train | 1,746 | dam | 8 | 24 | 16.7709 | 17 | 3.37056 | { "bin_edges": [ 8, 10, 12, 14, 16, 18, 20, 22, 24, 24 ], "hist": [ 5, 77, 273, 300, 339, 357, 233, 132, 30 ] } | false |
| edwright/yearling-sales | default | train | 1,746 | dam sire | 9 | 24 | 16.64204 | 17 | 3.62586 | { "bin_edges": [ 9, 11, 13, 15, 17, 19, 21, 23, 24 ], "hist": [ 15, 212, 440, 194, 329, 187, 305, 64 ] } | false |
| edwright/yearling-sales | default | train | 1,746 | sale date | 16 | 16 | 16 | 16 | 0 | { "bin_edges": [ 16, 16 ], "hist": [ 1574 ] } | false |
| AISE-TUDelft/PY150k | default | test | 50,000 | full_line | 6 | 529,506 | 4,567.8366 | 1,703.5 | 9,571.26877 | { "bin_edges": [ 6, 52957, 105908, 158859, 211810, 264761, 317712, 370663, 423614, 476565, 529506 ], "hist": [ 49701, 245, 37, 15, 1, 0, 0, 0, 0, 1 ] } | false |
| AISE-TUDelft/PY150k | default | test | 50,000 | gt | 3 | 17,053 | 38.98494 | 33 | 84.45926 | { "bin_edges": [ 3, 1709, 3415, 5121, 6827, 8533, 10239, 11945, 13651, 15357, 17053 ], "hist": [ 49998, 0, 0, 1, 0, 0, 0, 0, 0, 1 ] } | false |
| AISE-TUDelft/PY150k | default | test | 50,000 | input | 0 | 370,863 | 2,231.90018 | 630 | 5,736.15338 | { "bin_edges": [ 0, 37087, 74174, 111261, 148348, 185435, 222522, 259609, 296696, 333783, 370863 ], "hist": [ 49794, 169, 27, 5, 2, 2, 0, 0, 0, 1 ] } | false |
| ghbacct/topic-classifier-news-headlines-classification | default | train | 7,920 | text | 6 | 240 | 61.89874 | 58 | 22.45641 | { "bin_edges": [ 6, 30, 54, 78, 102, 126, 150, 174, 198, 222, 240 ], "hist": [ 161, 2937, 3311, 871, 557, 59, 12, 7, 2, 3 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| ghbacct/topic-classifier-news-headlines-classification | default | test | 1,989 | text | 10 | 195 | 61.78984 | 58 | 22.5155 | { "bin_edges": [ 10, 29, 48, 67, 86, 105, 124, 143, 162, 181, 195 ], "hist": [ 37, 477, 916, 271, 148, 106, 26, 5, 2, 1 ] } | false |
| f64k/tnved | default | train | 1,000,000 | text_g31_1;"tnved_g33" | 214 | 1,046 | 391.26172 | 323 | 179.97265 | { "bin_edges": [ 214, 298, 382, 466, 550, 634, 718, 802, 886, 970, 1046 ], "hist": [ 427915, 197135, 108396, 83420, 64702, 42825, 32056, 20910, 14424, 8217 ] } | false |
| nielsr/datacomp_small_llamav2_classified | default | train | 50,000 | sha256 | 64 | 64 | 64 | 64 | 0 | { "bin_edges": [ 64, 64 ], "hist": [ 50000 ] } | false |
| nielsr/datacomp_small_llamav2_classified | default | train | 50,000 | text | 1 | 7,061 | 54.18898 | 39 | 85.47106 | { "bin_edges": [ 1, 708, 1415, 2122, 2829, 3536, 4243, 4950, 5657, 6364, 7061 ], "hist": [ 49927, 65, 3, 1, 0, 1, 0, 1, 1, 1 ] } | false |
| nielsr/datacomp_small_llamav2_classified | default | train | 50,000 | uid | 32 | 32 | 32 | 32 | 0 | { "bin_edges": [ 32, 32 ], "hist": [ 50000 ] } | false |
| nielsr/datacomp_small_llamav2_classified | default | train | 50,000 | url | 20 | 3,429 | 102.04168 | 89 | 64.24973 | { "bin_edges": [ 20, 361, 702, 1043, 1384, 1725, 2066, 2407, 2748, 3089, 3429 ], "hist": [ 49773, 132, 74, 6, 9, 4, 0, 0, 0, 2 ] } | false |
| AdithyaSNair/disease | default | train | 1,082,328 | DataValue | 1 | 95 | 3.79515 | 4 | 1.66033 | { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 95 ], "hist": [ 728932, 0, 0, 0, 0, 18, 102, 135, 0, 48 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=1000, 'top_p'=0.2} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| MichaelOrme/Paraphrased_Word | default | train | 28,207 | Input | 2 | 1,552 | 67.95246 | 44 | 79.03185 | { "bin_edges": [ 2, 158, 314, 470, 626, 782, 938, 1094, 1250, 1406, 1552 ], "hist": [ 25671, 2057, 329, 87, 36, 15, 8, 2, 0, 2 ] } | false |
| MichaelOrme/Paraphrased_Word | default | train | 28,207 | Response | 2 | 2,821 | 71.35732 | 45 | 86.09522 | { "bin_edges": [ 2, 284, 566, 848, 1130, 1412, 1694, 1976, 2258, 2540, 2821 ], "hist": [ 27431, 679, 68, 24, 1, 3, 0, 0, 0, 1 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| RoversX/Samantha-data-single-line-Mixed-V1-Converted-32K | default | train | 32,000 | text | 29 | 3,187 | 380.96031 | 306 | 322.82419 | { "bin_edges": [ 29, 345, 661, 977, 1293, 1609, 1925, 2241, 2557, 2873, 3187 ], "hist": [ 16990, 11791, 2076, 488, 205, 124, 127, 120, 67, 12 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| botp/RyokoAI_ScribbleHub17K | default | train | 373,299 | text | 1 | 371,293 | 11,248.69497 | 9,283 | 8,930.8562 | { "bin_edges": [ 1, 37131, 74261, 111391, 148521, 185651, 222781, 259911, 297041, 334171, 371293 ], "hist": [ 367320, 5237, 534, 109, 54, 19, 13, 6, 4, 3 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.95, 'top_k'=10000, 'top_p'=0.2} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |