Column-level string-length statistics per dataset, config, and split, as reported by the Hugging Face dataset viewer: `num_examples` is the split size; `min`, `max`, `mean`, `median`, and `std` summarize the length of each string column; `histogram` gives bin edges and per-bin counts; and `partial` marks statistics computed on only part of the split.

| dataset | config | split | num_examples | column_name | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|
| Stopwolf/ms-marco-v2.1-sr-500k | default | train | 503,098 | well_formed_answer | 3 | 903 | 78.85449 | 61 | 52.43186 | { "bin_edges": [ 3, 94, 185, 276, 367, 458, 549, 640, 731, 822, 903 ], "hist": [ 59874, 17817, 3292, 546, 94, 27, 17, 3, 0, 1 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | beginend_subset | 140 | 1,001 | 995.97484 | 1,001 | 60.58323 | { "bin_edges": [ 140, 227, 314, 401, 488, 575, 662, 749, 836, 923, 1001 ], "hist": [ 3, 3, 3, 1, 0, 0, 0, 0, 0, 1421 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | book_id | 17 | 55 | 34.27254 | 36 | 8.02538 | { "bin_edges": [ 17, 21, 25, 29, 33, 37, 41, 45, 49, 53, 55 ], "hist": [ 129, 126, 38, 202, 291, 470, 76, 26, 62, 11 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | chapter | 328 | 512,714 | 23,152.59399 | 15,594 | 31,048.80548 | { "bin_edges": [ 328, 51567, 102806, 154045, 205284, 256523, 307762, 359001, 410240, 461479, 512714 ], "hist": [ 1324, 74, 17, 9, 2, 3, 0, 1, 0, 1 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | chapter_path | 40 | 78 | 44.12648 | 42 | 5.84449 | { "bin_edges": [ 40, 44, 48, 52, 56, 60, 64, 68, 72, 76, 78 ], "hist": [ 1177, 41, 18, 30, 122, 16, 24, 0, 2, 1 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | middle_subset | 139 | 1,001 | 995.97135 | 1,001 | 60.62406 | { "bin_edges": [ 139, 226, 313, 400, 487, 574, 661, 748, 835, 922, 1001 ], "hist": [ 3, 3, 3, 1, 0, 0, 0, 0, 0, 1421 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | random_subset | 279 | 1,000 | 996.942 | 1,000 | 40.58527 | { "bin_edges": [ 279, 352, 425, 498, 571, 644, 717, 790, 863, 936, 1000 ], "hist": [ 3, 0, 0, 2, 1, 2, 1, 0, 0, 1422 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary | 310 | 38,201 | 3,431.5905 | 2,320 | 3,368.30601 | { "bin_edges": [ 310, 4100, 7890, 11680, 15470, 19260, 23050, 26840, 30630, 34420, 38201 ], "hist": [ 1013, 299, 76, 33, 6, 0, 1, 0, 0, 3 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_analysis | 98 | 14,115 | 2,722.64454 | 2,377 | 2,184.07425 | { "bin_edges": [ 98, 1500, 2902, 4304, 5706, 7108, 8510, 9912, 11314, 12716, 14115 ], "hist": [ 291, 133, 147, 96, 29, 14, 4, 2, 2, 5 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_id | 5 | 36 | 12.28791 | 10 | 3.76061 | { "bin_edges": [ 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 75, 792, 247, 296, 15, 3, 1, 2 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_name | 5 | 53 | 13.86148 | 11 | 7.31871 | { "bin_edges": [ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 53 ], "hist": [ 227, 739, 357, 14, 13, 7, 11, 27, 19, 1 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_path | 55 | 89 | 70.74144 | 73 | 6.75383 | { "bin_edges": [ 55, 59, 63, 67, 71, 75, 79, 83, 87, 89 ], "hist": [ 84, 216, 15, 232, 477, 340, 30, 32, 5 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_text | 131 | 30,181 | 1,858.45003 | 1,292 | 2,008.14904 | { "bin_edges": [ 131, 3137, 6143, 9149, 12155, 15161, 18167, 21173, 24179, 27185, 30181 ], "hist": [ 1226, 171, 23, 6, 1, 1, 0, 0, 1, 2 ] } | false |
| nschantz21/booksum-randomized | default | test | 1,431 | summary_url | 91 | 176 | 122.98532 | 127 | 15.9925 | { "bin_edges": [ 91, 100, 109, 118, 127, 136, 145, 154, 163, 172, 176 ], "hist": [ 209, 119, 119, 243, 509, 186, 27, 4, 7, 8 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=100, 'top_p'=0.5} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=100, 'top_p'=0.5} | train | 3,270 | prediction | 5 | 522 | 39.15076 | 6 | 61.18729 | { "bin_edges": [ 5, 57, 109, 161, 213, 265, 317, 369, 421, 473, 522 ], "hist": [ 2636, 50, 535, 10, 5, 5, 5, 7, 13, 4 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | train | 6,682 | Answer | 15 | 7,001 | 455.00284 | 330.5 | 433.18968 | { "bin_edges": [ 15, 714, 1413, 2112, 2811, 3510, 4209, 4908, 5607, 6306, 7001 ], "hist": [ 5551, 884, 174, 50, 14, 6, 0, 1, 0, 2 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | train | 6,682 | CreationDate | 23 | 23 | 23 | 23 | 0 | { "bin_edges": [ 23, 23 ], "hist": [ 6682 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | train | 6,682 | Question | 15 | 7,243 | 569.1733 | 465 | 433.63982 | { "bin_edges": [ 15, 738, 1461, 2184, 2907, 3630, 4353, 5076, 5799, 6522, 7243 ], "hist": [ 5063, 1350, 200, 53, 10, 2, 2, 0, 1, 1 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | train | 6,682 | Tags | 6 | 76 | 25.80275 | 24 | 12.3907 | { "bin_edges": [ 6, 14, 22, 30, 38, 46, 54, 62, 70, 76 ], "hist": [ 1116, 1625, 1668, 1121, 674, 292, 125, 52, 9 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | train | 6,682 | Title | 15 | 142 | 51.14232 | 48 | 19.98711 | { "bin_edges": [ 15, 28, 41, 54, 67, 80, 93, 106, 119, 132, 142 ], "hist": [ 608, 1643, 1823, 1290, 684, 353, 191, 62, 17, 11 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | beginend_subset | 135 | 1,001 | 993.78115 | 1,001 | 70.09898 | { "bin_edges": [ 135, 222, 309, 396, 483, 570, 657, 744, 831, 918, 1001 ], "hist": [ 20, 21, 36, 18, 8, 0, 0, 0, 0, 9497 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | book_id | 12 | 75 | 29.58583 | 29 | 7.1645 | { "bin_edges": [ 12, 19, 26, 33, 40, 47, 54, 61, 68, 75, 75 ], "hist": [ 527, 2213, 3870, 2180, 642, 144, 12, 1, 10, 1 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | chapter | 277 | 677,776 | 22,618.82771 | 16,365 | 24,871.88916 | { "bin_edges": [ 277, 68027, 135777, 203527, 271277, 339027, 406777, 474527, 542277, 610027, 677776 ], "hist": [ 9146, 397, 45, 6, 3, 1, 0, 0, 1, 1 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | chapter_path | 39 | 74 | 43.55583 | 42 | 5.62568 | { "bin_edges": [ 39, 43, 47, 51, 55, 59, 63, 67, 71, 74 ], "hist": [ 6972, 1496, 59, 74, 546, 276, 151, 22, 4 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | middle_subset | 135 | 1,001 | 993.77438 | 1,001 | 70.16257 | { "bin_edges": [ 135, 222, 309, 396, 483, 570, 657, 744, 831, 918, 1001 ], "hist": [ 20, 21, 36, 18, 8, 0, 0, 0, 0, 9497 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | random_subset | 268 | 1,000 | 996.29635 | 1,000 | 41.20902 | { "bin_edges": [ 268, 342, 416, 490, 564, 638, 712, 786, 860, 934, 1000 ], "hist": [ 1, 18, 5, 8, 18, 10, 17, 2, 10, 9511 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary | 199 | 44,015 | 3,925.25281 | 3,080 | 3,210.37642 | { "bin_edges": [ 199, 4581, 8963, 13345, 17727, 22109, 26491, 30873, 35255, 39637, 44015 ], "hist": [ 6653, 2339, 445, 112, 24, 10, 7, 6, 3, 1 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_analysis | 91 | 33,654 | 2,904.18024 | 2,510.5 | 2,307.55176 | { "bin_edges": [ 91, 3448, 6805, 10162, 13519, 16876, 20233, 23590, 26947, 30304, 33654 ], "hist": [ 3649, 1419, 164, 43, 13, 7, 3, 3, 2, 1 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_id | 2 | 209 | 13.18583 | 10 | 7.72692 | { "bin_edges": [ 2, 23, 44, 65, 86, 107, 128, 149, 170, 191, 209 ], "hist": [ 9241, 251, 69, 27, 7, 2, 1, 1, 0, 1 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_name | 2 | 209 | 13.75396 | 11 | 8.87307 | { "bin_edges": [ 2, 23, 44, 65, 86, 107, 128, 149, 170, 191, 209 ], "hist": [ 9048, 329, 118, 34, 7, 1, 2, 2, 0, 2 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_path | 50 | 114 | 65.42146 | 64 | 7.15571 | { "bin_edges": [ 50, 57, 64, 71, 78, 85, 92, 99, 106, 113, 114 ], "hist": [ 673, 3883, 2824, 1735, 358, 108, 8, 0, 0, 11 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_text | 10 | 26,762 | 2,129.9824 | 1,595 | 1,874.02741 | { "bin_edges": [ 10, 2686, 5362, 8038, 10714, 13390, 16066, 18742, 21418, 24094, 26762 ], "hist": [ 7291, 1751, 393, 103, 38, 16, 5, 0, 0, 3 ] } | false |
| nschantz21/booksum-randomized | default | train | 9,600 | summary_url | 86 | 333 | 118.42105 | 120 | 17.12064 | { "bin_edges": [ 86, 111, 136, 161, 186, 211, 236, 261, 286, 311, 333 ], "hist": [ 3347, 4744, 1199, 78, 22, 3, 1, 2, 0, 2 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | validation | 1,960 | Answer | 32 | 5,574 | 415.71378 | 287.5 | 428.24314 | { "bin_edges": [ 32, 587, 1142, 1697, 2252, 2807, 3362, 3917, 4472, 5027, 5574 ], "hist": [ 1558, 306, 58, 22, 11, 2, 0, 0, 1, 2 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | validation | 1,960 | CreationDate | 23 | 23 | 23 | 23 | 0 | { "bin_edges": [ 23, 23 ], "hist": [ 1960 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | validation | 1,960 | Question | 61 | 5,083 | 549.80051 | 454 | 396.19165 | { "bin_edges": [ 61, 564, 1067, 1570, 2073, 2576, 3079, 3582, 4085, 4588, 5083 ], "hist": [ 1254, 553, 102, 27, 16, 6, 1, 0, 0, 1 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | validation | 1,960 | Tags | 6 | 80 | 29.88214 | 28 | 13.87591 | { "bin_edges": [ 6, 14, 22, 30, 38, 46, 54, 62, 70, 78, 80 ], "hist": [ 205, 394, 488, 344, 254, 153, 76, 27, 18, 1 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | validation | 1,960 | Title | 15 | 150 | 57.04031 | 54 | 21.06663 | { "bin_edges": [ 15, 29, 43, 57, 71, 85, 99, 113, 127, 141, 150 ], "hist": [ 97, 408, 569, 459, 245, 91, 53, 22, 9, 7 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | test | 900 | Answer | 28 | 3,191 | 388.72667 | 260.5 | 390.22112 | { "bin_edges": [ 28, 345, 662, 979, 1296, 1613, 1930, 2247, 2564, 2881, 3191 ], "hist": [ 561, 201, 73, 28, 17, 7, 9, 2, 1, 1 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | test | 900 | CreationDate | 23 | 23 | 23 | 23 | 0 | { "bin_edges": [ 23, 23 ], "hist": [ 900 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | test | 900 | Question | 63 | 8,214 | 515.48667 | 427 | 439.3891 | { "bin_edges": [ 63, 879, 1695, 2511, 3327, 4143, 4959, 5775, 6591, 7407, 8214 ], "hist": [ 815, 73, 6, 5, 0, 0, 0, 0, 0, 1 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | test | 900 | Tags | 6 | 83 | 30.17667 | 28 | 14.10705 | { "bin_edges": [ 6, 14, 22, 30, 38, 46, 54, 62, 70, 78, 83 ], "hist": [ 85, 195, 191, 193, 101, 75, 37, 15, 5, 3 ] } | false |
| RazinAleks/SO-Python_QA-Other_class | default | test | 900 | Title | 16 | 148 | 59.02111 | 55 | 22.70389 | { "bin_edges": [ 16, 30, 44, 58, 72, 86, 100, 114, 128, 142, 148 ], "hist": [ 53, 176, 251, 212, 94, 59, 29, 18, 6, 2 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | beginend_subset | 116 | 1,001 | 978.79313 | 1,001 | 121.52207 | { "bin_edges": [ 116, 205, 294, 383, 472, 561, 650, 739, 828, 917, 1001 ], "hist": [ 10, 13, 0, 18, 9, 0, 0, 0, 0, 1434 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | book_id | 16 | 49 | 29.51146 | 28 | 8.09605 | { "bin_edges": [ 16, 20, 24, 28, 32, 36, 40, 44, 48, 49 ], "hist": [ 67, 444, 129, 243, 228, 193, 79, 78, 23 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | chapter | 235 | 108,697 | 19,357.8403 | 16,627 | 16,205.38996 | { "bin_edges": [ 235, 11082, 21929, 32776, 43623, 54470, 65317, 76164, 87011, 97858, 108697 ], "hist": [ 518, 419, 306, 160, 40, 13, 9, 0, 5, 14 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | chapter_path | 41 | 62 | 42.66442 | 42 | 3.91402 | { "bin_edges": [ 41, 44, 47, 50, 53, 56, 59, 62, 62 ], "hist": [ 1370, 10, 3, 0, 19, 74, 7, 1 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | middle_subset | 115 | 1,001 | 978.77763 | 1,001 | 121.60811 | { "bin_edges": [ 115, 204, 293, 382, 471, 560, 649, 738, 827, 916, 1001 ], "hist": [ 10, 12, 1, 16, 11, 0, 0, 0, 0, 1434 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | random_subset | 231 | 1,000 | 989.29717 | 1,000 | 75.8025 | { "bin_edges": [ 231, 308, 385, 462, 539, 616, 693, 770, 847, 924, 1000 ], "hist": [ 10, 0, 0, 8, 5, 0, 0, 8, 4, 1449 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary | 227 | 18,953 | 3,778.63208 | 3,426.5 | 2,463.68552 | { "bin_edges": [ 227, 2100, 3973, 5846, 7719, 9592, 11465, 13338, 15211, 17084, 18953 ], "hist": [ 434, 431, 359, 160, 66, 21, 4, 5, 2, 2 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_analysis | 147 | 14,119 | 2,659.81089 | 2,848 | 1,694.40262 | { "bin_edges": [ 147, 1545, 2943, 4341, 5739, 7137, 8535, 9933, 11331, 12729, 14119 ], "hist": [ 298, 235, 364, 70, 26, 10, 3, 2, 1, 1 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_id | 5 | 98 | 12.78032 | 10 | 11.3562 | { "bin_edges": [ 5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 98 ], "hist": [ 1310, 143, 3, 3, 0, 0, 0, 0, 6, 19 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_name | 5 | 98 | 12.83683 | 10 | 11.5974 | { "bin_edges": [ 5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 98 ], "hist": [ 1253, 189, 5, 2, 3, 0, 0, 0, 6, 19 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_path | 53 | 86 | 66.99259 | 65 | 6.83911 | { "bin_edges": [ 53, 57, 61, 65, 69, 73, 77, 81, 85, 86 ], "hist": [ 12, 162, 441, 322, 287, 145, 34, 4, 77 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_text | 37 | 11,060 | 1,775.94272 | 1,358 | 1,466.21639 | { "bin_edges": [ 37, 1140, 2243, 3346, 4449, 5552, 6655, 7758, 8861, 9964, 11060 ], "hist": [ 614, 474, 230, 83, 43, 17, 9, 6, 6, 2 ] } | false |
| nschantz21/booksum-randomized | default | validation | 1,484 | summary_url | 89 | 153 | 118.09569 | 120 | 16.10984 | { "bin_edges": [ 89, 96, 103, 110, 117, 124, 131, 138, 145, 152, 153 ], "hist": [ 174, 188, 122, 100, 259, 341, 123, 107, 54, 16 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.05, 'top_k'=10000, 'top_p'=1.0} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.05, 'top_k'=10000, 'top_p'=1.0} | train | 3,270 | prediction | 5 | 499 | 38.95627 | 6 | 60.98597 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2214, 472, 536, 9, 4, 3, 5, 6, 14, 7 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=1000, 'top_p'=0.5} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=1000, 'top_p'=0.5} | train | 3,270 | prediction | 5 | 511 | 38.98624 | 6 | 60.94645 | { "bin_edges": [ 5, 56, 107, 158, 209, 260, 311, 362, 413, 464, 511 ], "hist": [ 2602, 83, 536, 10, 6, 3, 5, 5, 14, 6 ] } | false |
| Vrushali/chatbot | default | train | 21,910,000 | BlockName | 1 | 30 | 7.39098 | 7 | 2.76205 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 196593, 9116792, 8378321, 3060428, 865699, 251156, 26979, 13081, 86, 865 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | Crop | 2 | 45 | 8.88414 | 6 | 6.92057 | { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 45 ], "hist": [ 14000423, 3811632, 1936126, 683611, 179150, 634466, 271358, 96707, 149612 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | DistrictName | 3 | 36 | 7.80464 | 7 | 2.97519 | { "bin_edges": [ 3, 7, 11, 15, 19, 23, 27, 31, 35, 36 ], "hist": [ 7648284, 11495321, 2244253, 305797, 74820, 115245, 4171, 0, 22109 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | KccAns | 1 | 66,600 | 65.57296 | 49 | 87.50284 | { "bin_edges": [ 1, 6661, 13321, 19981, 26641, 33301, 39961, 46621, 53281, 59941, 66600 ], "hist": [ 21701426, 90, 15, 6, 4, 2, 3, 1, 0, 1 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | QueryText | 1 | 81,071 | 32.08386 | 29 | 41.13589 | { "bin_edges": [ 1, 8109, 16217, 24325, 32433, 40541, 48649, 56757, 64865, 72973, 81071 ], "hist": [ 21901090, 17, 0, 1, 1, 1, 0, 3, 0, 2 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | QueryType | 1 | 81 | 11.64763 | 7 | 7.80597 | { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 81 ], "hist": [ 11382907, 7335107, 788306, 1106187, 39197, 13577, 0, 0, 2891 ] } | true |
| Vrushali/chatbot | default | train | 21,910,000 | StateName | 5 | 17 | 10.02468 | 9 | 3.04826 | { "bin_edges": [ 5, 7, 9, 11, 13, 15, 17, 17 ], "hist": [ 3726290, 2626924, 4857819, 4091558, 5880936, 405962, 320511 ] } | true |
| daishen/cra-travelinsurace | default | train | 8,865 | query | 1,323 | 1,385 | 1,342.52882 | 1,342 | 9.54116 | { "bin_edges": [ 1323, 1330, 1337, 1344, 1351, 1358, 1365, 1372, 1379, 1385 ], "hist": [ 854, 1351, 2640, 2299, 1204, 445, 55, 15, 2 ] } | false |
| daishen/cra-travelinsurace | default | train | 8,865 | text | 204 | 266 | 223.52882 | 223 | 9.54116 | { "bin_edges": [ 204, 211, 218, 225, 232, 239, 246, 253, 260, 266 ], "hist": [ 854, 1351, 2640, 2299, 1204, 445, 55, 15, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=100, 'top_p'=1.0} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=100, 'top_p'=1.0} | train | 3,270 | prediction | 5 | 507 | 39.8526 | 6 | 61.78134 | { "bin_edges": [ 5, 56, 107, 158, 209, 260, 311, 362, 413, 464, 507 ], "hist": [ 2563, 107, 539, 15, 7, 9, 5, 10, 9, 6 ] } | false |
| jahb57/gpt2_embeddings_BATCH_14 | default | train | 28,952 | sentence | 3 | 1,006 | 73.28081 | 57 | 54.28293 | { "bin_edges": [ 3, 104, 205, 306, 407, 508, 609, 710, 811, 912, 1006 ], "hist": [ 23703, 4327, 748, 121, 25, 13, 8, 1, 2, 4 ] } | true |
| daishen/cra-travelinsurace | default | validation | 1,266 | query | 1,324 | 1,375 | 1,343.13744 | 1,343 | 9.65819 | { "bin_edges": [ 1324, 1330, 1336, 1342, 1348, 1354, 1360, 1366, 1372, 1375 ], "hist": [ 113, 135, 365, 281, 143, 177, 39, 10, 3 ] } | false |
| daishen/cra-travelinsurace | default | validation | 1,266 | text | 205 | 256 | 224.13744 | 224 | 9.65819 | { "bin_edges": [ 205, 211, 217, 223, 229, 235, 241, 247, 253, 256 ], "hist": [ 113, 135, 365, 281, 143, 177, 39, 10, 3 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=1000, 'top_p'=1.0} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=1000, 'top_p'=1.0} | train | 3,270 | prediction | 5 | 482 | 39.63089 | 6 | 62.1966 | { "bin_edges": [ 5, 53, 101, 149, 197, 245, 293, 341, 389, 437, 482 ], "hist": [ 2186, 494, 532, 10, 3, 5, 10, 7, 10, 13 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=10000, 'top_p'=0.5} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=10000, 'top_p'=0.5} | train | 3,270 | prediction | 5 | 499 | 38.83272 | 6 | 60.66365 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2220, 464, 538, 10, 4, 3, 6, 5, 12, 8 ] } | false |
| LahiruLowe/niv2_filtered_2pertask | default | train | 3,112 | inputs | 54 | 40,310 | 743.08933 | 446 | 1,340.06894 | { "bin_edges": [ 54, 4080, 8106, 12132, 16158, 20184, 24210, 28236, 32262, 36288, 40310 ], "hist": [ 3079, 19, 8, 1, 3, 0, 0, 1, 0, 1 ] } | false |
| LahiruLowe/niv2_filtered_2pertask | default | train | 3,112 | targets | 0 | 4,304 | 53.02699 | 15 | 139.46556 | { "bin_edges": [ 0, 431, 862, 1293, 1724, 2155, 2586, 3017, 3448, 3879, 4304 ], "hist": [ 3094, 9, 2, 3, 1, 1, 1, 0, 0, 1 ] } | false |
| LahiruLowe/niv2_filtered_2pertask | default | train | 3,112 | task_name | 19 | 85 | 38.19216 | 37 | 8.45871 | { "bin_edges": [ 19, 26, 33, 40, 47, 54, 61, 68, 75, 82, 85 ], "hist": [ 82, 788, 1048, 738, 286, 120, 30, 12, 6, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=10000, 'top_p'=1.0} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.55, 'top_k'=10000, 'top_p'=1.0} | train | 3,270 | prediction | 5 | 515 | 38.60765 | 6 | 60.62929 | { "bin_edges": [ 5, 57, 109, 161, 213, 265, 317, 369, 421, 473, 515 ], "hist": [ 2636, 73, 510, 10, 7, 5, 5, 9, 11, 4 ] } | false |
| azhou03/fr_test | default | train | 1,900 | text | 496 | 4,197 | 1,110.06368 | 1,044 | 339.6176 | { "bin_edges": [ 496, 867, 1238, 1609, 1980, 2351, 2722, 3093, 3464, 3835, 4197 ], "hist": [ 462, 931, 377, 89, 23, 8, 6, 3, 0, 1 ] } | false |
| davanstrien/test_imdb_embedd2 | default | train | 25,000 | text | 52 | 13,704 | 1,325.06964 | 979 | 1,003.13367 | { "bin_edges": [ 52, 1418, 2784, 4150, 5516, 6882, 8248, 9614, 10980, 12346, 13704 ], "hist": [ 17426, 5384, 1490, 535, 147, 11, 4, 2, 0, 1 ] } | false |
| azhou03/fr_test | default | test | 100 | text | 675 | 2,695 | 1,138.17 | 1,075.5 | 321.57906 | { "bin_edges": [ 675, 878, 1081, 1284, 1487, 1690, 1893, 2096, 2299, 2502, 2695 ], "hist": [ 22, 29, 19, 19, 5, 3, 2, 0, 0, 1 ] } | false |
| thanhduycao/oscar_vi_shard_0 | default | train | 2,474,428 | text | 2 | 788,665 | 2,573.3345 | 1,421 | 5,797.00423 | { "bin_edges": [ 2, 78869, 157736, 236603, 315470, 394337, 473204, 552071, 630938, 709805, 788665 ], "hist": [ 2472957, 1101, 205, 74, 41, 20, 10, 11, 7, 2 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.05} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| davanstrien/test_imdb_embedd2 | default | test | 25,000 | text | 32 | 12,988 | 1,293.7924 | 962 | 975.90776 | { "bin_edges": [ 32, 1328, 2624, 3920, 5216, 6512, 7808, 9104, 10400, 11696, 12988 ], "hist": [ 17044, 5634, 1550, 544, 221, 2, 1, 1, 0, 3 ] } | false |
| cminor102/smallsalesforcebot | default | train | 5,000 | text | 454 | 1,398 | 720.2268 | 709 | 112.1524 | { "bin_edges": [ 454, 549, 644, 739, 834, 929, 1024, 1119, 1214, 1309, 1398 ], "hist": [ 208, 1135, 1659, 1209, 561, 182, 35, 10, 0, 1 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | id | 17 | 20 | 19.40061 | 19 | 0.61718 | { "bin_edges": [ 17, 18, 19, 20, 20 ], "hist": [ 21, 167, 1563, 1519 ] } | false |
| automated-research-group/llama2_7b_chat-boolq-results | {'do_sample'=True, 'beams'=5, 'temperature'=0.9, 'top_k'=100, 'top_p'=0.1} | train | 3,270 | prediction | 5 | 499 | 39.18471 | 6 | 61.00867 | { "bin_edges": [ 5, 55, 105, 155, 205, 255, 305, 355, 405, 455, 499 ], "hist": [ 2204, 476, 542, 9, 4, 4, 5, 5, 13, 8 ] } | false |
| nus-yam/ex-repair | cvefixes-corpus-in | train | 4,695 | ground_truth | 3 | 7,618,406 | 50,234.3707 | 18,246 | 181,193.81382 | { "bin_edges": [ 3, 761844, 1523685, 2285526, 3047367, 3809208, 4571049, 5332890, 6094731, 6856572, 7618406 ], "hist": [ 4664, 3, 0, 3, 3, 1, 0, 0, 0, 1 ] } | false |
| nus-yam/ex-repair | cvefixes-corpus-in | train | 4,695 | project_id | 13 | 16 | 13.59084 | 14 | 0.60269 | { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 2111, 2489, 0, 95 ] } | false |
| nus-yam/ex-repair | cvefixes-corpus-in | train | 4,695 | source | 5 | 7,618,115 | 51,360.57021 | 18,850 | 183,592.04078 | { "bin_edges": [ 5, 761817, 1523629, 2285441, 3047253, 3809065, 4570877, 5332689, 6094501, 6856313, 7618115 ], "hist": [ 4546, 4, 0, 3, 3, 1, 0, 0, 0, 1 ] } | false |
| davanstrien/test_imdb_embedd2 | default | unsupervised | 50,000 | text | 43 | 14,282 | 1,329.9025 | 983 | 1,004.67168 | { "bin_edges": [ 43, 1467, 2891, 4315, 5739, 7163, 8587, 10011, 11435, 12859, 14282 ], "hist": [ 35383, 10610, 2771, 1047, 174, 12, 2, 0, 0, 1 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | Body | 5 | 380 | 116.04656 | 111 | 46.56762 | { "bin_edges": [ 5, 43, 81, 119, 157, 195, 233, 271, 309, 347, 380 ], "hist": [ 82, 841, 1366, 1005, 475, 130, 37, 18, 15, 4 ] } | false |
| sethapun/cv_svamp_augmented_fold3 | default | train | 3,973 | Equation | 7 | 62 | 20.25522 | 17 | 5.10862 | { "bin_edges": [ 7, 13, 19, 25, 31, 37, 43, 49, 55, 61, 62 ], "hist": [ 2, 2716, 14, 1198, 14, 17, 8, 0, 3, 1 ] } | false |