String-length statistics for the `ceval/ceval-exam` dataset, one row per config, split, and column. For each text column the table reports the number of examples and the minimum, maximum, mean, median, and standard deviation of the string lengths, together with a length histogram (`bin_edges`/`hist`) and a `partial` flag. The fields and their types are:

| Field | Type | Range |
| --- | --- | --- |
| dataset | string | lengths 4–115 |
| config | string | lengths 1–121 |
| split | string | lengths 1–228 |
| num_examples | int64 | 3 – 341M |
| column_name | string | lengths 1–22.7k |
| min | int64 | 0 – 1.81M |
| max | int64 | 0 – 981M |
| mean | float64 | 0 – 42.2M |
| median | float64 | 0 – 24M |
| std | float64 | 0 – 84.2M |
| histogram | dict | — |
| partial | bool | 2 classes |
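Statistics of this shape can be reproduced with a short script. The sketch below is a minimal illustration, assuming the `datasets` and `numpy` packages and the Hub id, config, and column names shown in the tables here; it is not the viewer's own implementation, so the exact bin edges and standard-deviation convention may differ.

```python
import numpy as np
from datasets import load_dataset


def string_length_stats(dataset_id: str, config: str, split: str, column: str, n_bins: int = 10) -> dict:
    """Compute length statistics for one string column, mirroring the fields in the table."""
    ds = load_dataset(dataset_id, config, split=split)
    lengths = np.array([len(text) for text in ds[column]])
    hist, bin_edges = np.histogram(lengths, bins=n_bins)
    return {
        "dataset": dataset_id,
        "config": config,
        "split": split,
        "num_examples": len(ds),
        "column_name": column,
        "min": int(lengths.min()),
        "max": int(lengths.max()),
        "mean": float(lengths.mean()),
        "median": float(np.median(lengths)),
        "std": float(lengths.std(ddof=1)),  # sample standard deviation; the viewer's convention may differ
        "histogram": {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()},
    }


if __name__ == "__main__":
    # Hypothetical call; the config and column names come from the table below.
    print(string_length_stats("ceval/ceval-exam", "high_school_physics", "test", "question"))
```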
The reported statistics:

| dataset | config | split | num_examples | column_name | min | max | mean | median | std | histogram | partial |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| ceval/ceval-exam | high_school_mathematics | test | 166 | C | 1 | 71 | 12.36145 | 9 | 12.06706 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 80, 39, 23, 15, 4, 2, 1, 1, 1 ] } | false |
| ceval/ceval-exam | high_school_mathematics | test | 166 | D | 1 | 86 | 13.51205 | 10 | 13.6545 | { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 86 ], "hist": [ 75, 45, 26, 12, 2, 3, 1, 0, 0, 2 ] } | false |
| ceval/ceval-exam | high_school_mathematics | test | 166 | question | 15 | 431 | 94.46386 | 77 | 60.04439 | { "bin_edges": [ 15, 57, 99, 141, 183, 225, 267, 309, 351, 393, 431 ], "hist": [ 36, 81, 24, 15, 2, 4, 2, 1, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_physics | test | 175 | A | 1 | 67 | 15.33143 | 14 | 10.4091 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 48, 44, 40, 24, 15, 1, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_physics | test | 175 | B | 1 | 111 | 16.41714 | 15 | 12.0689 | { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 111 ], "hist": [ 65, 79, 25, 4, 1, 0, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_physics | test | 175 | C | 1 | 67 | 17.26857 | 16 | 12.45645 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 43, 37, 46, 25, 12, 5, 2, 0, 2, 3 ] } | false |
| ceval/ceval-exam | high_school_physics | test | 175 | D | 1 | 72 | 18.81143 | 18 | 13.08565 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 72 ], "hist": [ 44, 36, 52, 19, 12, 7, 2, 0, 3 ] } | false |
| ceval/ceval-exam | high_school_physics | test | 175 | question | 12 | 211 | 56.33714 | 46 | 43.1796 | { "bin_edges": [ 12, 32, 52, 72, 92, 112, 132, 152, 172, 192, 211 ], "hist": [ 64, 36, 30, 20, 8, 3, 5, 3, 3, 3 ] } | false |
| ceval/ceval-exam | high_school_politics | test | 176 | A | 1 | 61 | 7.43182 | 3 | 8.0602 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 116, 30, 18, 8, 3, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_politics | test | 176 | B | 1 | 52 | 7.22159 | 3 | 7.51241 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 52 ], "hist": [ 105, 34, 19, 14, 2, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_politics | test | 176 | C | 2 | 89 | 7.74432 | 3 | 9.50834 | { "bin_edges": [ 2, 11, 20, 29, 38, 47, 56, 65, 74, 83, 89 ], "hist": [ 129, 33, 9, 4, 0, 0, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_politics | test | 176 | D | 2 | 54 | 8.07386 | 3.5 | 8.61893 | { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 54 ], "hist": [ 110, 25, 19, 14, 3, 4, 0, 0, 1 ] } | false |
| ceval/ceval-exam | high_school_politics | test | 176 | question | 12 | 350 | 126.57386 | 121 | 65.34152 | { "bin_edges": [ 12, 46, 80, 114, 148, 182, 216, 250, 284, 318, 350 ], "hist": [ 23, 24, 33, 28, 27, 27, 9, 3, 1, 1 ] } | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | A | 1 | 30 | 8.31395 | 7 | 6.17344 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 21, 63, 35, 20, 12, 7, 5, 3, 2, 4 ] } | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | B | 1 | 29 | 8.42442 | 7 | 5.87791 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 23, 61, 25, 24, 16, 10, 6, 4, 2, 1 ] } | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | C | 1 | 30 | 8.9186 | 7 | 6.40032 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 19, 62, 29, 19, 14, 13, 5, 7, 2, 2 ] } | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | D | 1 | 28 | 8.88953 | 7 | 6.19229 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 21, 61, 25, 18, 18, 15, 8, 1, 4, 1 ] } | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | question | 11 | 119 | 26.26163 | 23 | 14.02154 | { "bin_edges": [ 11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 119 ], "hist": [ 79, 55, 20, 11, 5, 1, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | law | test | 221 | A | 1 | 643 | 16.08597 | 10 | 43.90596 | { "bin_edges": [ 1, 66, 131, 196, 261, 326, 391, 456, 521, 586, 643 ], "hist": [ 219, 1, 0, 0, 0, 0, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | law | test | 221 | B | 1 | 72 | 13.8009 | 10 | 12.11183 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 72 ], "hist": [ 86, 70, 37, 14, 4, 4, 1, 2, 3 ] } | false |
| ceval/ceval-exam | law | test | 221 | C | 1 | 77 | 15.04525 | 13 | 12.01695 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 77 ], "hist": [ 77, 67, 42, 19, 5, 5, 4, 1, 0, 1 ] } | false |
| ceval/ceval-exam | law | test | 221 | D | 1 | 71 | 14.53394 | 12 | 11.2789 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 79, 66, 39, 21, 10, 3, 2, 0, 1 ] } | false |
| ceval/ceval-exam | law | test | 221 | question | 12 | 219 | 55.33937 | 38 | 40.24287 | { "bin_edges": [ 12, 33, 54, 75, 96, 117, 138, 159, 180, 201, 219 ], "hist": [ 95, 37, 27, 24, 20, 7, 6, 2, 2, 1 ] } | false |
| ceval/ceval-exam | legal_professional | test | 215 | A | 2 | 130 | 27.08837 | 24 | 18.87407 | { "bin_edges": [ 2, 15, 28, 41, 54, 67, 80, 93, 106, 119, 130 ], "hist": [ 46, 87, 47, 20, 7, 3, 1, 3, 0, 1 ] } | false |
| ceval/ceval-exam | legal_professional | test | 215 | B | 2 | 126 | 27.89302 | 24 | 18.17549 | { "bin_edges": [ 2, 15, 28, 41, 54, 67, 80, 93, 106, 119, 126 ], "hist": [ 47, 75, 53, 22, 9, 5, 2, 1, 0, 1 ] } | false |
| ceval/ceval-exam | legal_professional | test | 215 | C | 2 | 135 | 27.92093 | 24 | 18.83421 | { "bin_edges": [ 2, 16, 30, 44, 58, 72, 86, 100, 114, 128, 135 ], "hist": [ 57, 78, 43, 26, 6, 1, 3, 0, 0, 1 ] } | false |
| ceval/ceval-exam | legal_professional | test | 215 | D | 2 | 151 | 29.71628 | 25 | 20.11877 | { "bin_edges": [ 2, 17, 32, 47, 62, 77, 92, 107, 122, 137, 151 ], "hist": [ 48, 90, 43, 24, 4, 2, 3, 0, 0, 1 ] } | false |
| ceval/ceval-exam | legal_professional | test | 215 | question | 14 | 278 | 71.54419 | 60 | 51.78989 | { "bin_edges": [ 14, 41, 68, 95, 122, 149, 176, 203, 230, 257, 278 ], "hist": [ 87, 30, 37, 29, 13, 7, 7, 2, 2, 1 ] } | false |
| ceval/ceval-exam | logic | test | 204 | A | 3 | 90 | 23.30392 | 21 | 14.14826 | { "bin_edges": [ 3, 12, 21, 30, 39, 48, 57, 66, 75, 84, 90 ], "hist": [ 36, 61, 55, 28, 12, 4, 4, 3, 0, 1 ] } | false |
| ceval/ceval-exam | logic | test | 204 | B | 3 | 114 | 24.06373 | 21 | 15.00807 | { "bin_edges": [ 3, 15, 27, 39, 51, 63, 75, 87, 99, 111, 114 ], "hist": [ 50, 85, 40, 16, 11, 0, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | logic | test | 204 | C | 4 | 120 | 24.62255 | 23 | 15.4827 | { "bin_edges": [ 4, 16, 28, 40, 52, 64, 76, 88, 100, 112, 120 ], "hist": [ 56, 80, 41, 16, 8, 1, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | logic | test | 204 | D | 4 | 84 | 24.20098 | 22 | 14.56508 | { "bin_edges": [ 4, 13, 22, 31, 40, 49, 58, 67, 76, 84 ], "hist": [ 43, 58, 51, 26, 14, 3, 5, 3, 1 ] } | false |
| ceval/ceval-exam | logic | test | 204 | question | 35 | 342 | 136.08333 | 128.5 | 55.41965 | { "bin_edges": [ 35, 66, 97, 128, 159, 190, 221, 252, 283, 314, 342 ], "hist": [ 14, 33, 51, 47, 28, 15, 9, 4, 0, 3 ] } | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | A | 1 | 33 | 9.6758 | 8 | 6.21702 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 53, 66, 41, 28, 19, 6, 2, 1, 3 ] } | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | B | 2 | 33 | 8.89498 | 8 | 5.61313 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 66, 82, 39, 11, 11, 6, 2, 2 ] } | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | C | 2 | 31 | 9.74886 | 8 | 5.85575 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 51, 35, 59, 24, 22, 11, 7, 5, 4, 1 ] } | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | D | 1 | 38 | 10.70776 | 9 | 6.77533 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 41, 59, 48, 31, 23, 6, 7, 2, 1, 1 ] } | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | question | 12 | 171 | 39.80822 | 27 | 31.43086 | { "bin_edges": [ 12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 171 ], "hist": [ 112, 48, 18, 13, 13, 2, 6, 1, 4, 2 ] } | false |
| ceval/ceval-exam | marxism | test | 179 | A | 2 | 27 | 9.98324 | 9 | 4.93039 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 19, 35, 62, 31, 15, 4, 5, 7, 1 ] } | false |
| ceval/ceval-exam | marxism | test | 179 | B | 2 | 30 | 10.32961 | 10 | 4.97158 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 30 ], "hist": [ 22, 29, 48, 45, 14, 9, 9, 1, 1, 1 ] } | false |
| ceval/ceval-exam | marxism | test | 179 | C | 2 | 35 | 10.63128 | 10 | 5.57062 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 35 ], "hist": [ 28, 60, 51, 21, 10, 5, 2, 1, 1 ] } | false |
| ceval/ceval-exam | marxism | test | 179 | D | 2 | 34 | 10.97207 | 10 | 5.42718 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 34 ], "hist": [ 29, 48, 55, 27, 10, 8, 1, 0, 1 ] } | false |
| ceval/ceval-exam | marxism | test | 179 | question | 7 | 92 | 22.13966 | 18 | 14.05637 | { "bin_edges": [ 7, 16, 25, 34, 43, 52, 61, 70, 79, 88, 92 ], "hist": [ 49, 87, 22, 7, 6, 2, 2, 1, 2, 1 ] } | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | A | 1 | 59 | 8.19635 | 5 | 8.14281 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 132, 49, 16, 11, 5, 2, 3, 0, 0, 1 ] } | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | B | 1 | 49 | 8.26941 | 6 | 7.5956 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 49 ], "hist": [ 109, 61, 19, 14, 5, 4, 5, 1, 0, 1 ] } | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | C | 1 | 50 | 8.71233 | 6 | 8.01743 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 50 ], "hist": [ 108, 56, 19, 16, 7, 8, 3, 0, 1, 1 ] } | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | D | 1 | 45 | 8.30594 | 6 | 7.88638 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 45 ], "hist": [ 108, 66, 15, 10, 8, 4, 5, 1, 2 ] } | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | question | 8 | 113 | 35.24201 | 30 | 18.25107 | { "bin_edges": [ 8, 19, 30, 41, 52, 63, 74, 85, 96, 107, 113 ], "hist": [ 33, 75, 43, 29, 18, 13, 4, 2, 1, 1 ] } | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | A | 1 | 32 | 9.75521 | 7 | 7.55694 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 70, 37, 21, 19, 25, 12, 4, 4 ] } | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | B | 1 | 37 | 10.28125 | 7.5 | 7.92068 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 67, 37, 18, 23, 24, 11, 8, 3, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | C | 1 | 44 | 10.23438 | 8 | 8.08931 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 73, 44, 26, 26, 12, 8, 2, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | D | 1 | 37 | 10.36979 | 8 | 8.22929 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 70, 36, 20, 21, 16, 16, 8, 2, 2, 1 ] } | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | question | 12 | 93 | 34.32292 | 29 | 17.48627 | { "bin_edges": [ 12, 21, 30, 39, 48, 57, 66, 75, 84, 93, 93 ], "hist": [ 41, 60, 36, 14, 17, 10, 7, 4, 2, 1 ] } | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | A | 1 | 59 | 11.1027 | 9 | 9.58969 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 71, 52, 33, 17, 5, 3, 0, 0, 2, 2 ] } | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | B | 1 | 65 | 12.30811 | 11 | 9.95245 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 65 ], "hist": [ 72, 53, 33, 17, 5, 2, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | C | 1 | 63 | 12.8973 | 12 | 10.77866 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 71, 46, 38, 16, 8, 2, 0, 3, 1 ] } | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | D | 1 | 109 | 14.37297 | 11 | 13.58333 | { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 109 ], "hist": [ 93, 55, 25, 7, 3, 0, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | question | 12 | 152 | 35.47568 | 27 | 25.15389 | { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 152 ], "hist": [ 88, 49, 24, 10, 3, 2, 6, 1, 0, 2 ] } | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | A | 2 | 31 | 8.46296 | 6 | 6.78016 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 44, 19, 13, 14, 8, 2, 1, 3, 1, 3 ] } | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | B | 2 | 33 | 8.7963 | 6 | 6.7613 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 33 ], "hist": [ 46, 27, 8, 14, 7, 4, 0, 2 ] } | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | C | 2 | 39 | 9.47222 | 6.5 | 7.89115 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 39 ], "hist": [ 49, 21, 11, 13, 1, 8, 1, 3, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | D | 2 | 41 | 9.74074 | 6.5 | 8.37148 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 41 ], "hist": [ 46, 23, 15, 8, 3, 7, 2, 2, 0, 2 ] } | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | question | 8 | 111 | 28.83333 | 22 | 20.26991 | { "bin_edges": [ 8, 19, 30, 41, 52, 63, 74, 85, 96, 107, 111 ], "hist": [ 38, 41, 13, 3, 6, 2, 1, 0, 1, 3 ] } | false |
| ceval/ceval-exam | middle_school_history | test | 207 | A | 1 | 26 | 6.05314 | 4 | 4.39352 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 26 ], "hist": [ 68, 67, 37, 14, 13, 4, 2, 1, 1 ] } | false |
| ceval/ceval-exam | middle_school_history | test | 207 | B | 2 | 26 | 6.2657 | 5 | 4.56793 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 100, 47, 30, 15, 6, 5, 1, 2, 1 ] } | false |
| ceval/ceval-exam | middle_school_history | test | 207 | C | 2 | 28 | 6.37198 | 5 | 4.98169 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 98, 47, 33, 14, 7, 1, 2, 1, 4 ] } | false |
| ceval/ceval-exam | middle_school_history | test | 207 | D | 2 | 27 | 6.58454 | 5 | 4.7143 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 89, 54, 28, 17, 11, 3, 2, 1, 2 ] } | false |
| ceval/ceval-exam | middle_school_history | test | 207 | question | 11 | 167 | 44.62802 | 35 | 29.5027 | { "bin_edges": [ 11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 167 ], "hist": [ 74, 53, 25, 23, 10, 10, 9, 2, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | A | 1 | 61 | 9.80226 | 6 | 9.86732 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 95, 42, 21, 5, 11, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | B | 1 | 62 | 10.44068 | 6 | 10.75175 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 62 ], "hist": [ 94, 36, 24, 7, 10, 3, 2, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | C | 1 | 61 | 10.9435 | 7 | 11.02565 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 90, 40, 21, 9, 10, 2, 2, 2, 1 ] } | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | D | 1 | 63 | 11.75141 | 7 | 11.87172 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 89, 35, 24, 10, 10, 5, 1, 1, 2 ] } | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | question | 10 | 218 | 48.07345 | 40 | 33.69449 | { "bin_edges": [ 10, 31, 52, 73, 94, 115, 136, 157, 178, 199, 218 ], "hist": [ 65, 47, 34, 15, 6, 5, 2, 2, 0, 1 ] } | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | A | 2 | 26 | 7.41969 | 4 | 6.19938 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 101, 16, 18, 25, 19, 5, 1, 4, 4 ] } | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | B | 2 | 26 | 7.36788 | 4 | 5.92526 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 100, 18, 15, 24, 23, 6, 3, 3, 1 ] } | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | C | 2 | 29 | 7.43005 | 4 | 6.10981 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 29 ], "hist": [ 100, 19, 14, 24, 21, 8, 3, 2, 1, 1 ] } | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | D | 2 | 28 | 7.77202 | 5 | 6.25748 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 93, 18, 20, 22, 23, 8, 4, 3, 2 ] } | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | question | 12 | 231 | 90.70984 | 77 | 48.86726 | { "bin_edges": [ 12, 34, 56, 78, 100, 122, 144, 166, 188, 210, 231 ], "hist": [ 17, 29, 52, 28, 17, 16, 16, 9, 5, 4 ] } | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | A | 1 | 36 | 11.67978 | 11.5 | 7.4039 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 44, 20, 33, 33, 28, 14, 4, 0, 2 ] } | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | B | 1 | 33 | 12.51124 | 13 | 7.71032 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 40, 20, 25, 40, 24, 19, 4, 4, 2 ] } | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | C | 1 | 35 | 12.91573 | 13 | 8.24681 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 41, 21, 24, 33, 22, 19, 13, 2, 3 ] } | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | D | 1 | 35 | 13.46067 | 13 | 8.44264 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 40, 22, 20, 26, 31, 23, 8, 5, 3 ] } | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | question | 12 | 158 | 34.33146 | 24 | 23.42429 | { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 158 ], "hist": [ 99, 31, 22, 10, 10, 3, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | A | 2 | 38 | 9.91038 | 9 | 6.63657 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 57, 64, 48, 20, 8, 6, 4, 3, 1, 1 ] } | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | B | 2 | 41 | 9.86321 | 9 | 6.4877 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 41 ], "hist": [ 57, 61, 50, 20, 13, 5, 3, 1, 0, 2 ] } | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | C | 2 | 40 | 9.64151 | 8 | 6.81953 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 40 ], "hist": [ 66, 62, 37, 27, 8, 4, 3, 1, 1, 3 ] } | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | D | 2 | 37 | 9.83491 | 8.5 | 6.50382 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 37 ], "hist": [ 62, 57, 51, 20, 8, 8, 0, 4, 2 ] } | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | question | 14 | 113 | 34.3066 | 29 | 18.0325 | { "bin_edges": [ 14, 24, 34, 44, 54, 64, 74, 84, 94, 104, 113 ], "hist": [ 65, 75, 27, 14, 15, 6, 6, 1, 1, 2 ] } | false |
| ceval/ceval-exam | operating_system | test | 179 | A | 1 | 33 | 6.35754 | 5 | 5.10074 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 88, 51, 16, 16, 3, 4, 0, 0, 1 ] } | false |
| ceval/ceval-exam | operating_system | test | 179 | B | 1 | 30 | 6.55866 | 5 | 4.82278 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 35, 84, 19, 19, 12, 6, 2, 0, 0, 2 ] } | false |
| ceval/ceval-exam | operating_system | test | 179 | C | 1 | 32 | 6.60894 | 5 | 4.97557 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 83, 49, 23, 18, 2, 1, 2, 1 ] } | false |
| ceval/ceval-exam | operating_system | test | 179 | D | 1 | 33 | 6.87709 | 5 | 5.40978 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 82, 47, 21, 19, 7, 0, 1, 1, 1 ] } | false |
| ceval/ceval-exam | operating_system | test | 179 | question | 10 | 115 | 29.95531 | 27 | 12.77718 | { "bin_edges": [ 10, 21, 32, 43, 54, 65, 76, 87, 98, 109, 115 ], "hist": [ 34, 81, 45, 11, 5, 1, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | physician | test | 443 | A | 1 | 53 | 7.39278 | 6 | 5.42027 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 259, 120, 51, 7, 3, 1, 1, 0, 1 ] } | false |
| ceval/ceval-exam | physician | test | 443 | B | 1 | 38 | 7.84424 | 6 | 5.6649 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 136, 159, 80, 38, 14, 4, 7, 1, 1, 3 ] } | false |
| ceval/ceval-exam | physician | test | 443 | C | 1 | 68 | 8.03837 | 6 | 6.4808 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 68 ], "hist": [ 273, 127, 29, 6, 3, 3, 0, 1, 0, 1 ] } | false |
| ceval/ceval-exam | physician | test | 443 | D | 1 | 48 | 8.14673 | 6 | 6.26299 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 48 ], "hist": [ 182, 151, 65, 22, 14, 4, 1, 1, 1, 2 ] } | false |
| ceval/ceval-exam | physician | test | 443 | question | 7 | 242 | 31.91874 | 21 | 28.78745 | { "bin_edges": [ 7, 31, 55, 79, 103, 127, 151, 175, 199, 223, 242 ], "hist": [ 332, 48, 26, 17, 12, 6, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | physician | val | 49 | A | 2 | 19 | 6.81633 | 6 | 4.08592 | { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 19 ], "hist": [ 10, 14, 9, 3, 2, 8, 2, 0, 1 ] } | false |
| ceval/ceval-exam | physician | val | 49 | B | 2 | 19 | 6.63265 | 5 | 3.81168 | { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 19 ], "hist": [ 6, 21, 6, 7, 2, 3, 3, 0, 1 ] } | false |
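As a quick sanity check, one could load a single config and split and compare a column's length range with the corresponding row above. This is a sketch under the same assumptions as before; the printed values may differ if the dataset has been updated since these statistics were computed.

```python
from datasets import load_dataset

# Load one config/split named in the table (assumed Hub id and config name).
ds = load_dataset("ceval/ceval-exam", "physician", split="val")
lengths = [len(option) for option in ds["A"]]

# The table above reports num_examples=49, min=2, max=19 for this row.
print(len(ds), min(lengths), max(lengths))
```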