choco9966 committed
Commit 2c5773a · 1 Parent: acb5167

remove results

This view is limited to 50 files because it contains too many changes. See raw diff.
- .gitattributes +0 -55
- 42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json +0 -444
- 42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json +0 -444
- 42MARU/GenAI-llama2-ko-en-platypus/result_2023-10-13 16:58:44.json +0 -444
- 42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json +0 -444
- 42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json +0 -444
- 42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json +0 -444
- 42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json +0 -444
- 42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json +0 -444
- 42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json +0 -444
- AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json +0 -444
- BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json +0 -444
- BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json +0 -444
- BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json +0 -444
- BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json +0 -444
- BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json +0 -444
- Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json +0 -444
- DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json +0 -444
- DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json +0 -444
- DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json +0 -444
- DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json +0 -444
- DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json +0 -444
- DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json +0 -444
- DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json +0 -444
- EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json +0 -444
- EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json +0 -444
- EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json +0 -444
- EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json +0 -444
- FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json +0 -444
- FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json +0 -444
- FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json +0 -444
- FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json +0 -444
- FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json +0 -444
- FINDA-FIT/xllama-instruct/result_2023-10-01 07:23:53.json +0 -444
- GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json +0 -444
- HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json +0 -444
- HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json +0 -444
- HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json +0 -444
- HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json +0 -444
- HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json +0 -444
- HumanF-MarkrAI/pub-llama-7b-v1/result_2023-10-19 00:06:32.json +0 -444
- Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json +0 -444
- Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json +0 -444
- Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json +0 -444
- Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json +0 -444
- Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json +0 -444
- Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json +0 -444
- KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json +0 -444
- KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json +0 -444
- KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json +0 -444
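All of the removed files follow the same naming pattern, `<organization>/<model>/result_<timestamp>.json`, one file per evaluated model. A minimal sketch for enumerating them in a local checkout (the `repo_dir` path is hypothetical and assumes a clone taken at the parent commit, acb5167, i.e. before this removal):

```python
from pathlib import Path

# Hypothetical local clone of the results repository, checked out at the
# parent commit (acb5167), i.e. before the result files were removed.
repo_dir = Path("./results-repo")

# Result files are laid out as <organization>/<model>/result_<timestamp>.json.
for path in sorted(repo_dir.glob("*/*/result_*.json")):
    org, model = path.parts[-3], path.parts[-2]
    print(f"{org}/{model}  ->  {path.name}")
```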
.gitattributes
DELETED
@@ -1,55 +0,0 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.lz4 filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
-# Audio files - uncompressed
-*.pcm filter=lfs diff=lfs merge=lfs -text
-*.sam filter=lfs diff=lfs merge=lfs -text
-*.raw filter=lfs diff=lfs merge=lfs -text
-# Audio files - compressed
-*.aac filter=lfs diff=lfs merge=lfs -text
-*.flac filter=lfs diff=lfs merge=lfs -text
-*.mp3 filter=lfs diff=lfs merge=lfs -text
-*.ogg filter=lfs diff=lfs merge=lfs -text
-*.wav filter=lfs diff=lfs merge=lfs -text
-# Image files - uncompressed
-*.bmp filter=lfs diff=lfs merge=lfs -text
-*.gif filter=lfs diff=lfs merge=lfs -text
-*.png filter=lfs diff=lfs merge=lfs -text
-*.tiff filter=lfs diff=lfs merge=lfs -text
-# Image files - compressed
-*.jpg filter=lfs diff=lfs merge=lfs -text
-*.jpeg filter=lfs diff=lfs merge=lfs -text
-*.webp filter=lfs diff=lfs merge=lfs -text
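Every pattern in the deleted `.gitattributes` carried `filter=lfs diff=lfs merge=lfs -text`, i.e. matching files were stored through Git LFS. A small illustrative check (simplified `fnmatch` matching on the file name only, ignoring gitattributes path semantics such as `saved_model/**/*`) showing that the removed `result_*.json` files were not LFS-tracked, which is why their full contents appear inline in this diff:

```python
from fnmatch import fnmatch

# A subset of the suffix patterns from the deleted .gitattributes; each was
# tagged "filter=lfs diff=lfs merge=lfs -text" (store matching files in Git LFS).
lfs_patterns = ["*.7z", "*.bin", "*.safetensors", "*.parquet", "*.zip", "*.png"]

def is_lfs_tracked(filename: str) -> bool:
    # Simplified: real gitattributes matching is path-aware; plain suffix globs
    # are enough to illustrate the point here.
    return any(fnmatch(filename, pattern) for pattern in lfs_patterns)

print(is_lfs_tracked("pytorch_model.bin"))                # True
print(is_lfs_tracked("result_2023-10-12 11:14:34.json"))  # False: plain-text JSON, diffed inline
```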
42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.39505119453924914, "acc_stderr": 0.014285898292938165, "acc_norm": 0.4445392491467577, "acc_norm_stderr": 0.014521226405627077 },
-    "harness|ko_hellaswag|10": { "acc": 0.41545508862776337, "acc_stderr": 0.004917931778593191, "acc_norm": 0.5571599283011353, "acc_norm_stderr": 0.004957068377516512 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.0383161053282193, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193 },
-    "harness|ko_mmlu_management|5": { "acc": 0.4174757281553398, "acc_stderr": 0.048828405482122375, "acc_norm": 0.4174757281553398, "acc_norm_stderr": 0.048828405482122375 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5108556832694764, "acc_stderr": 0.017875748840242407, "acc_norm": 0.5108556832694764, "acc_norm_stderr": 0.017875748840242407 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.03057944277361033, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.03057944277361033 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598053, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598053 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.4758842443729904, "acc_stderr": 0.02836504154256457, "acc_norm": 0.4758842443729904, "acc_norm_stderr": 0.02836504154256457 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.03259625118416828, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.03259625118416828 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.46564885496183206, "acc_stderr": 0.043749285605997376, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.043749285605997376 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.4696969696969697, "acc_stderr": 0.03555804051763929, "acc_norm": 0.4696969696969697, "acc_norm_stderr": 0.03555804051763929 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.04093793981266237, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.04093793981266237 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.36134453781512604, "acc_stderr": 0.031204691225150013, "acc_norm": 0.36134453781512604, "acc_norm_stderr": 0.031204691225150013 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.02428314052946728, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.02428314052946728 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3448275862068966, "acc_stderr": 0.03344283744280459, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03344283744280459 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4483870967741935, "acc_stderr": 0.028292056830112735, "acc_norm": 0.4483870967741935, "acc_norm_stderr": 0.028292056830112735 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.6068376068376068, "acc_stderr": 0.03199957924651047, "acc_norm": 0.6068376068376068, "acc_norm_stderr": 0.03199957924651047 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183238, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183238 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.5124378109452736, "acc_stderr": 0.0353443984853958, "acc_norm": 0.5124378109452736, "acc_norm_stderr": 0.0353443984853958 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3583815028901734, "acc_stderr": 0.03656343653353159, "acc_norm": 0.3583815028901734, "acc_norm_stderr": 0.03656343653353159 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30687830687830686, "acc_stderr": 0.023752928712112126, "acc_norm": 0.30687830687830686, "acc_norm_stderr": 0.023752928712112126 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.2986111111111111, "acc_stderr": 0.03827052357950756, "acc_norm": 0.2986111111111111, "acc_norm_stderr": 0.03827052357950756 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.44508670520231214, "acc_stderr": 0.02675625512966377, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.02675625512966377 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3558282208588957, "acc_stderr": 0.03761521380046734, "acc_norm": 0.3558282208588957, "acc_norm_stderr": 0.03761521380046734 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.44135802469135804, "acc_stderr": 0.027628737155668777, "acc_norm": 0.44135802469135804, "acc_norm_stderr": 0.027628737155668777 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.40414507772020725, "acc_stderr": 0.0354150857888402, "acc_norm": 0.40414507772020725, "acc_norm_stderr": 0.0354150857888402 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4091743119266055, "acc_stderr": 0.02108067026443373, "acc_norm": 0.4091743119266055, "acc_norm_stderr": 0.02108067026443373 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.03567016675276863, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.03567016675276863 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.39869281045751637, "acc_stderr": 0.02803609227389177, "acc_norm": 0.39869281045751637, "acc_norm_stderr": 0.02803609227389177 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.04465869780531009, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.04465869780531009 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.4342105263157895, "acc_stderr": 0.040335656678483184, "acc_norm": 0.4342105263157895, "acc_norm_stderr": 0.040335656678483184 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0190709855896875, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0190709855896875 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3191489361702128, "acc_stderr": 0.027807990141320207, "acc_norm": 0.3191489361702128, "acc_norm_stderr": 0.027807990141320207 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.19642857142857142, "acc_stderr": 0.03770970049347019, "acc_norm": 0.19642857142857142, "acc_norm_stderr": 0.03770970049347019 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25462962962962965, "acc_stderr": 0.02971127586000534, "acc_norm": 0.25462962962962965, "acc_norm_stderr": 0.02971127586000534 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22426470588235295, "acc_stderr": 0.02533684856333237, "acc_norm": 0.22426470588235295, "acc_norm_stderr": 0.02533684856333237 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.03155782816556164, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.03155782816556164 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.41350210970464135, "acc_stderr": 0.03205649904851859, "acc_norm": 0.41350210970464135, "acc_norm_stderr": 0.03205649904851859 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.29335071707953064, "acc_stderr": 0.011628520449582076, "acc_norm": 0.29335071707953064, "acc_norm_stderr": 0.011628520449582076 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3627450980392157, "acc_stderr": 0.033744993563193555, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.033744993563193555 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.43636363636363634, "acc_stderr": 0.03872592983524754, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.03872592983524754 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.01598359510181139, "mc2": 0.4602391231259313, "mc2_stderr": 0.015191570633369808 },
-    "harness|ko_commongen_v2|2": { "acc": 0.4765258215962441, "acc_stderr": 0.017120879527725653, "acc_norm": 0.5258215962441315, "acc_norm_stderr": 0.017116907933735905 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "42MARU/GenAI-llama-2-ko-en-instruct-v1",
-    "model_sha": "aee07500d61a1d5d214cf0bc0040650957cf3da0",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
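Each removed result file shares this schema: a `results` object keyed by task (with `acc`, `acc_stderr`, `acc_norm`, `acc_norm_stderr`, or `mc1`/`mc2` for TruthfulQA), a `versions` object, and a `config_general` block identifying the evaluated model. A minimal sketch, assuming one of these files is saved locally (the path below is illustrative), for computing a macro-average of `acc_norm` over the tasks that report it:

```python
import json

# Illustrative local path; every removed result file uses the same layout.
with open("result_2023-10-12 11:14:34.json", encoding="utf-8") as f:
    report = json.load(f)

model_name = report["config_general"]["model_name"]

# TruthfulQA reports mc1/mc2 instead of acc_norm, so it is skipped by this filter.
acc_norms = [
    scores["acc_norm"]
    for scores in report["results"].values()
    if "acc_norm" in scores
]

print(f"{model_name}: mean acc_norm over {len(acc_norms)} tasks = "
      f"{sum(acc_norms) / len(acc_norms):.4f}")
```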
42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.3848122866894198, "acc_stderr": 0.014218371065251095, "acc_norm": 0.4402730375426621, "acc_norm_stderr": 0.014506769524804243 },
-    "harness|ko_hellaswag|10": { "acc": 0.4190400318661621, "acc_stderr": 0.0049239357498424945, "acc_norm": 0.5560645289782912, "acc_norm_stderr": 0.004958314114266494 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.5614035087719298, "acc_stderr": 0.038057975055904594, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.038057975055904594 },
-    "harness|ko_mmlu_management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977238, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977238 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5287356321839081, "acc_stderr": 0.017850410794380173, "acc_norm": 0.5287356321839081, "acc_norm_stderr": 0.017850410794380173 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3404255319148936, "acc_stderr": 0.030976692998534443, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.030976692998534443 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.5273311897106109, "acc_stderr": 0.028355633568328188, "acc_norm": 0.5273311897106109, "acc_norm_stderr": 0.028355633568328188 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.4798206278026906, "acc_stderr": 0.033530461674123, "acc_norm": 0.4798206278026906, "acc_norm_stderr": 0.033530461674123 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5353535353535354, "acc_stderr": 0.03553436368828061, "acc_norm": 0.5353535353535354, "acc_norm_stderr": 0.03553436368828061 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.49159663865546216, "acc_stderr": 0.03247390276569669, "acc_norm": 0.49159663865546216, "acc_norm_stderr": 0.03247390276569669 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.46153846153846156, "acc_stderr": 0.025275892070240634, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.025275892070240634 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.04826217294139894 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3399014778325123, "acc_stderr": 0.033327690684107895, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.033327690684107895 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4806451612903226, "acc_stderr": 0.0284226874043121, "acc_norm": 0.4806451612903226, "acc_norm_stderr": 0.0284226874043121 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.6752136752136753, "acc_stderr": 0.03067902276549883, "acc_norm": 0.6752136752136753, "acc_norm_stderr": 0.03067902276549883 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.47547169811320755, "acc_stderr": 0.030735822206205615, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205615 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972745, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972745 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230172, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230172 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673282, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673282 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.4027777777777778, "acc_stderr": 0.04101405519842425, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.04101405519842425 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49421965317919075, "acc_stderr": 0.026917296179149116, "acc_norm": 0.49421965317919075, "acc_norm_stderr": 0.026917296179149116 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.48466257668711654, "acc_stderr": 0.039265223787088445, "acc_norm": 0.48466257668711654, "acc_norm_stderr": 0.039265223787088445 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.49074074074074076, "acc_stderr": 0.027815973433878014, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.027815973433878014 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5440414507772021, "acc_stderr": 0.03594413711272436, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.03594413711272436 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5559633027522936, "acc_stderr": 0.021302621211654518, "acc_norm": 0.5559633027522936, "acc_norm_stderr": 0.021302621211654518 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.4673202614379085, "acc_stderr": 0.028568699752225875, "acc_norm": 0.4673202614379085, "acc_norm_stderr": 0.028568699752225875 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.47368421052631576, "acc_stderr": 0.04063302731486671, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04063302731486671 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3741830065359477, "acc_stderr": 0.019576953122088833, "acc_norm": 0.3741830065359477, "acc_norm_stderr": 0.019576953122088833 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509314, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509314 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.033509916046960436, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.033509916046960436 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.264804469273743, "acc_stderr": 0.014756906483260664, "acc_norm": 0.264804469273743, "acc_norm_stderr": 0.014756906483260664 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.39705882352941174, "acc_stderr": 0.029722152099280058, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.029722152099280058 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.49387755102040815, "acc_stderr": 0.032006820201639086, "acc_norm": 0.49387755102040815, "acc_norm_stderr": 0.032006820201639086 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5738396624472574, "acc_stderr": 0.03219035703131774, "acc_norm": 0.5738396624472574, "acc_norm_stderr": 0.03219035703131774 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.35071707953063885, "acc_stderr": 0.012187773370741518, "acc_norm": 0.35071707953063885, "acc_norm_stderr": 0.012187773370741518 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4950980392156863, "acc_stderr": 0.03509143375606786, "acc_norm": 0.4950980392156863, "acc_norm_stderr": 0.03509143375606786 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5696969696969697, "acc_stderr": 0.03866225962879077, "acc_norm": 0.5696969696969697, "acc_norm_stderr": 0.03866225962879077 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.44227632802507094, "mc2_stderr": 0.015242459306682204 },
-    "harness|ko_commongen_v2|2": { "acc": 0.431924882629108, "acc_stderr": 0.01698017709257206, "acc_norm": 0.5105633802816901, "acc_norm_stderr": 0.017135953743220793 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
-    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
-    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
-    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
-    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
-    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
-    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
-    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
-    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
-    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
-    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
-    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
-    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
-    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
-    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
-    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b",
-    "model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
42MARU/GenAI-llama2-ko-en-platypus/result_2023-10-13 16:58:44.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.3839590443686007, "acc_stderr": 0.01421244498065189, "acc_norm": 0.4522184300341297, "acc_norm_stderr": 0.014544519880633832 },
-    "harness|ko_hellaswag|10": { "acc": 0.4166500697072296, "acc_stderr": 0.00491996282220832, "acc_norm": 0.5524795857398924, "acc_norm_stderr": 0.004962220512548352 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.5614035087719298, "acc_stderr": 0.038057975055904594, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.038057975055904594 },
-    "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5351213282247765, "acc_stderr": 0.017835798806290642, "acc_norm": 0.5351213282247765, "acc_norm_stderr": 0.017835798806290642 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.31063829787234043, "acc_stderr": 0.03025123757921317, "acc_norm": 0.31063829787234043, "acc_norm_stderr": 0.03025123757921317 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.038444531817709175, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.038444531817709175 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.02839677044411129, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.02839677044411129 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.4484304932735426, "acc_stderr": 0.03337883736255099, "acc_norm": 0.4484304932735426, "acc_norm_stderr": 0.03337883736255099 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5267175572519084, "acc_stderr": 0.04379024936553894, "acc_norm": 0.5267175572519084, "acc_norm_stderr": 0.04379024936553894 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562429, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562429 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.494949494949495, "acc_stderr": 0.035621707606254015, "acc_norm": 0.494949494949495, "acc_norm_stderr": 0.035621707606254015 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.04093793981266236, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.04093793981266236 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.0379328118530781, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.0379328118530781 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4957983193277311, "acc_stderr": 0.03247734334448111, "acc_norm": 0.4957983193277311, "acc_norm_stderr": 0.03247734334448111 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.025049197876042328, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.025049197876042328 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760628, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.04820403072760628 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509568, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509568 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.43548387096774194, "acc_stderr": 0.02820622559150275, "acc_norm": 0.43548387096774194, "acc_norm_stderr": 0.02820622559150275 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.6581196581196581, "acc_stderr": 0.03107502852650775, "acc_norm": 0.6581196581196581, "acc_norm_stderr": 0.03107502852650775 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641095, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641095 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.04724577405731572, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.04724577405731572 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095932, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095932 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.032979866484738336, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.032979866484738336 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.5771144278606966, "acc_stderr": 0.034932317774212816, "acc_norm": 0.5771144278606966, "acc_norm_stderr": 0.034932317774212816 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.036812296333943194, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.036812296333943194 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.023201392938194978, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04122728707651282, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.04122728707651282 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5057803468208093, "acc_stderr": 0.02691729617914911, "acc_norm": 0.5057803468208093, "acc_norm_stderr": 0.02691729617914911 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44785276073619634, "acc_stderr": 0.03906947479456602, "acc_norm": 0.44785276073619634, "acc_norm_stderr": 0.03906947479456602 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.4722222222222222, "acc_stderr": 0.027777777777777797, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777797 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5647668393782384, "acc_stderr": 0.03578038165008586, "acc_norm": 0.5647668393782384, "acc_norm_stderr": 0.03578038165008586 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281335, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281335 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5522935779816514, "acc_stderr": 0.02131975496242546, "acc_norm": 0.5522935779816514, "acc_norm_stderr": 0.02131975496242546 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.2777777777777778, "acc_stderr": 0.040061680838488774, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.040061680838488774 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02852638345214264, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02852638345214264 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.6859504132231405, "acc_stderr": 0.04236964753041018, "acc_norm": 0.6859504132231405, "acc_norm_stderr": 0.04236964753041018 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.45394736842105265, "acc_stderr": 0.04051646342874141, "acc_norm": 0.45394736842105265, "acc_norm_stderr": 0.04051646342874141 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3839869281045752, "acc_stderr": 0.019675808135281525, "acc_norm": 0.3839869281045752, "acc_norm_stderr": 0.019675808135281525 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.35815602836879434, "acc_stderr": 0.02860208586275942, "acc_norm": 0.35815602836879434, "acc_norm_stderr": 0.02860208586275942 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404544, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404544 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25462962962962965, "acc_stderr": 0.02971127586000534, "acc_norm": 0.25462962962962965, "acc_norm_stderr": 0.02971127586000534 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.293854748603352, "acc_stderr": 0.015235075776719616, "acc_norm": 0.293854748603352, "acc_norm_stderr": 0.015235075776719616 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.27941176470588236, "acc_stderr": 0.02725720260611495, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.02725720260611495 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.4775510204081633, "acc_stderr": 0.031976941187136725, "acc_norm": 0.4775510204081633, "acc_norm_stderr": 0.031976941187136725 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.0317847187456473, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.0317847187456473 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.33116036505867014, "acc_stderr": 0.01202012819598576, "acc_norm": 0.33116036505867014, "acc_norm_stderr": 0.01202012819598576 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.49019607843137253, "acc_stderr": 0.03508637358630572, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.03508637358630572 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5454545454545454, "acc_stderr": 0.038881769216741004, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.038881769216741004 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.4478448789442893, "mc2_stderr": 0.015296172022310957 },
-    "harness|ko_commongen_v2|2": { "acc": 0.568075117370892, "acc_stderr": 0.016980177092572074, "acc_norm": 0.6197183098591549, "acc_norm_stderr": 0.01664121729750358 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1,
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "42MARU/GenAI-llama2-ko-en-platypus",
|
436 |
-
"model_sha": "35093e36712fc3edde865a2ac45e7a8f09c9f514",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
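The result files removed in this commit share the layout visible above: a "results" map keyed by "harness|<task>|<n_shot>", a "versions" map, and a "config_general" block. A minimal sketch of reading such a file back and aggregating scores, assuming only that layout (the path and helper name below are illustrative, not part of this repository):

import json
from statistics import mean

def load_result(path: str) -> dict:
    # Parse one result file with the "results" / "versions" / "config_general" layout shown above.
    with open(path, encoding="utf-8") as f:
        return json.load(f)

result = load_result("result.json")  # illustrative path

# Average acc_norm over the ko_mmlu sub-tasks recorded under "results".
mmlu = [v["acc_norm"] for k, v in result["results"].items() if k.startswith("harness|ko_mmlu_")]
print(result["config_general"]["model_name"], round(mean(mmlu), 4))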
42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3455631399317406, "acc_stderr": 0.013896938461145678, "acc_norm": 0.3839590443686007, "acc_norm_stderr": 0.01421244498065189},
        "harness|ko_hellaswag|10": {"acc": 0.3975303724357698, "acc_stderr": 0.004883871774350598, "acc_norm": 0.5247958573989245, "acc_norm_stderr": 0.004983641854351152},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.3684210526315789, "acc_stderr": 0.036996580176568775, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.036996580176568775},
        "harness|ko_mmlu_management|5": {"acc": 0.32038834951456313, "acc_stderr": 0.0462028408228004, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.0462028408228004},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.3831417624521073, "acc_stderr": 0.01738477419488563, "acc_norm": 0.3831417624521073, "acc_norm_stderr": 0.01738477419488563},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.32592592592592595, "acc_stderr": 0.040491220417025055, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.040491220417025055},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.28085106382978725, "acc_stderr": 0.02937917046412482, "acc_norm": 0.28085106382978725, "acc_norm_stderr": 0.02937917046412482},
        "harness|ko_mmlu_virology|5": {"acc": 0.3795180722891566, "acc_stderr": 0.03777798822748017, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 0.03777798822748017},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.35691318327974275, "acc_stderr": 0.027210420375934012, "acc_norm": 0.35691318327974275, "acc_norm_stderr": 0.027210420375934012},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.4170403587443946, "acc_stderr": 0.03309266936071721, "acc_norm": 0.4170403587443946, "acc_norm_stderr": 0.03309266936071721},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4198473282442748, "acc_stderr": 0.043285772152629715, "acc_norm": 0.4198473282442748, "acc_norm_stderr": 0.043285772152629715},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.35858585858585856, "acc_stderr": 0.034169036403915214, "acc_norm": 0.35858585858585856, "acc_norm_stderr": 0.034169036403915214},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.25517241379310346, "acc_stderr": 0.03632984052707842, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707842},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.1568627450980392, "acc_stderr": 0.03618664819936245, "acc_norm": 0.1568627450980392, "acc_norm_stderr": 0.03618664819936245},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.25630252100840334, "acc_stderr": 0.028359620870533953, "acc_norm": 0.25630252100840334, "acc_norm_stderr": 0.028359620870533953},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.24871794871794872, "acc_stderr": 0.021916957709213803, "acc_norm": 0.24871794871794872, "acc_norm_stderr": 0.021916957709213803},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.37037037037037035, "acc_stderr": 0.04668408033024932, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04668408033024932},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.23645320197044334, "acc_stderr": 0.029896114291733545, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733545},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3161290322580645, "acc_stderr": 0.026450874489042767, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.026450874489042767},
        "harness|ko_mmlu_marketing|5": {"acc": 0.4358974358974359, "acc_stderr": 0.03248577511578401, "acc_norm": 0.4358974358974359, "acc_norm_stderr": 0.03248577511578401},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.30943396226415093, "acc_stderr": 0.028450154794118627, "acc_norm": 0.30943396226415093, "acc_norm_stderr": 0.028450154794118627},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.37272727272727274, "acc_stderr": 0.04631381319425463, "acc_norm": 0.37272727272727274, "acc_norm_stderr": 0.04631381319425463},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.25555555555555554, "acc_stderr": 0.026593939101844072, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844072},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966},
        "harness|ko_mmlu_sociology|5": {"acc": 0.3283582089552239, "acc_stderr": 0.033206858897443244, "acc_norm": 0.3283582089552239, "acc_norm_stderr": 0.033206858897443244},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.23121387283236994, "acc_stderr": 0.0321473730202947, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.0321473730202947},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.23809523809523808, "acc_stderr": 0.021935878081184756, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.021935878081184756},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.2777777777777778, "acc_stderr": 0.03745554791462457, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03745554791462457},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.33236994219653176, "acc_stderr": 0.025361168749688225, "acc_norm": 0.33236994219653176, "acc_norm_stderr": 0.025361168749688225},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.33641975308641975, "acc_stderr": 0.026289734945952926, "acc_norm": 0.33641975308641975, "acc_norm_stderr": 0.026289734945952926},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.32642487046632124, "acc_stderr": 0.033840286211432945, "acc_norm": 0.32642487046632124, "acc_norm_stderr": 0.033840286211432945},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.30701754385964913, "acc_stderr": 0.0433913832257986, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.0433913832257986},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.3174311926605505, "acc_stderr": 0.019957152198460497, "acc_norm": 0.3174311926605505, "acc_norm_stderr": 0.019957152198460497},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.1746031746031746, "acc_stderr": 0.03395490020856111, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.03395490020856111},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.35294117647058826, "acc_stderr": 0.02736359328468495, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.02736359328468495},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
        "harness|ko_mmlu_international_law|5": {"acc": 0.36363636363636365, "acc_stderr": 0.043913262867240704, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.043913262867240704},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.3026315789473684, "acc_stderr": 0.037385206761196686, "acc_norm": 0.3026315789473684, "acc_norm_stderr": 0.037385206761196686},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.2679738562091503, "acc_stderr": 0.017917974069594726, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.017917974069594726},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2624113475177305, "acc_stderr": 0.02624492034984301, "acc_norm": 0.2624113475177305, "acc_norm_stderr": 0.02624492034984301},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.33035714285714285, "acc_stderr": 0.04464285714285712, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285712},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.24074074074074073, "acc_stderr": 0.029157522184605586, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605586},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.35294117647058826, "acc_stderr": 0.0290294228156814, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.0290294228156814},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.27755102040816326, "acc_stderr": 0.028666857790274648, "acc_norm": 0.27755102040816326, "acc_norm_stderr": 0.028666857790274648},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.35864978902953587, "acc_stderr": 0.031219569445301847, "acc_norm": 0.35864978902953587, "acc_norm_stderr": 0.031219569445301847},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.27249022164276404, "acc_stderr": 0.01137165829431153, "acc_norm": 0.27249022164276404, "acc_norm_stderr": 0.01137165829431153},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2696078431372549, "acc_stderr": 0.031145570659486782, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.031145570659486782},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148125, "mc2": 0.39805148377575406, "mc2_stderr": 0.015027401787198838},
        "harness|ko_commongen_v2|2": {"acc": 0.392018779342723, "acc_stderr": 0.016735309112043194, "acc_norm": 0.46830985915492956, "acc_norm_stderr": 0.017105318850828437}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/llama-2-ko-7b-instruct",
        "model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3293515358361775, "acc_stderr": 0.013734057652635474, "acc_norm": 0.386518771331058, "acc_norm_stderr": 0.014230084761910474},
        "harness|ko_hellaswag|10": {"acc": 0.3835889265086636, "acc_stderr": 0.00485265887677539, "acc_norm": 0.5022903804023103, "acc_norm_stderr": 0.004989729059957435},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.391812865497076, "acc_stderr": 0.03743979825926401, "acc_norm": 0.391812865497076, "acc_norm_stderr": 0.03743979825926401},
        "harness|ko_mmlu_management|5": {"acc": 0.2815533980582524, "acc_stderr": 0.04453254836326466, "acc_norm": 0.2815533980582524, "acc_norm_stderr": 0.04453254836326466},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.367816091954023, "acc_stderr": 0.01724382889184626, "acc_norm": 0.367816091954023, "acc_norm_stderr": 0.01724382889184626},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.34074074074074073, "acc_stderr": 0.04094376269996795, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996795},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.2851063829787234, "acc_stderr": 0.02951319662553935, "acc_norm": 0.2851063829787234, "acc_norm_stderr": 0.02951319662553935},
        "harness|ko_mmlu_virology|5": {"acc": 0.3132530120481928, "acc_stderr": 0.03610805018031024, "acc_norm": 0.3132530120481928, "acc_norm_stderr": 0.03610805018031024},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.3858520900321543, "acc_stderr": 0.027648149599751464, "acc_norm": 0.3858520900321543, "acc_norm_stderr": 0.027648149599751464},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.43946188340807174, "acc_stderr": 0.03331092511038179, "acc_norm": 0.43946188340807174, "acc_norm_stderr": 0.03331092511038179},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.31313131313131315, "acc_stderr": 0.03304205087813653, "acc_norm": 0.31313131313131315, "acc_norm_stderr": 0.03304205087813653},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.3697478991596639, "acc_stderr": 0.031357095996135904, "acc_norm": 0.3697478991596639, "acc_norm_stderr": 0.031357095996135904},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.2794871794871795, "acc_stderr": 0.022752388839776823, "acc_norm": 0.2794871794871795, "acc_norm_stderr": 0.022752388839776823},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.37962962962962965, "acc_stderr": 0.04691521224077742, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.04691521224077742},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.22660098522167488, "acc_stderr": 0.02945486383529298, "acc_norm": 0.22660098522167488, "acc_norm_stderr": 0.02945486383529298},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3225806451612903, "acc_stderr": 0.026593084516572267, "acc_norm": 0.3225806451612903, "acc_norm_stderr": 0.026593084516572267},
        "harness|ko_mmlu_marketing|5": {"acc": 0.5, "acc_stderr": 0.03275608910402091, "acc_norm": 0.5, "acc_norm_stderr": 0.03275608910402091},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.3471698113207547, "acc_stderr": 0.029300101705549652, "acc_norm": 0.3471698113207547, "acc_norm_stderr": 0.029300101705549652},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.36363636363636365, "acc_stderr": 0.04607582090719976, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04607582090719976},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.23703703703703705, "acc_stderr": 0.025928876132766118, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.025928876132766118},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2119205298013245, "acc_stderr": 0.03336767086567977, "acc_norm": 0.2119205298013245, "acc_norm_stderr": 0.03336767086567977},
        "harness|ko_mmlu_sociology|5": {"acc": 0.39800995024875624, "acc_stderr": 0.034611994290400135, "acc_norm": 0.39800995024875624, "acc_norm_stderr": 0.034611994290400135},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.2774566473988439, "acc_stderr": 0.03414014007044036, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.03414014007044036},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.24867724867724866, "acc_stderr": 0.022261817692400175, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.022261817692400175},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.2916666666666667, "acc_stderr": 0.03800968060554859, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554859},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.38439306358381503, "acc_stderr": 0.026189666966272035, "acc_norm": 0.38439306358381503, "acc_norm_stderr": 0.026189666966272035},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3496932515337423, "acc_stderr": 0.03746668325470022, "acc_norm": 0.3496932515337423, "acc_norm_stderr": 0.03746668325470022},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.3611111111111111, "acc_stderr": 0.026725868809100793, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.026725868809100793},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.35751295336787564, "acc_stderr": 0.03458816042181005, "acc_norm": 0.35751295336787564, "acc_norm_stderr": 0.03458816042181005},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.3394495412844037, "acc_stderr": 0.02030210934266235, "acc_norm": 0.3394495412844037, "acc_norm_stderr": 0.02030210934266235},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.2619047619047619, "acc_stderr": 0.039325376803928704, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.039325376803928704},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.3660130718954248, "acc_stderr": 0.027582811415159607, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.027582811415159607},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632},
        "harness|ko_mmlu_international_law|5": {"acc": 0.49586776859504134, "acc_stderr": 0.04564198767432754, "acc_norm": 0.49586776859504134, "acc_norm_stderr": 0.04564198767432754},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.24342105263157895, "acc_stderr": 0.034923496688842384, "acc_norm": 0.24342105263157895, "acc_norm_stderr": 0.034923496688842384},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3366013071895425, "acc_stderr": 0.019117213911495165, "acc_norm": 0.3366013071895425, "acc_norm_stderr": 0.019117213911495165},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.29432624113475175, "acc_stderr": 0.027187127011503796, "acc_norm": 0.29432624113475175, "acc_norm_stderr": 0.027187127011503796},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.29464285714285715, "acc_stderr": 0.04327040932578728, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578728},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.2824074074074074, "acc_stderr": 0.030701372111510927, "acc_norm": 0.2824074074074074, "acc_norm_stderr": 0.030701372111510927},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.25139664804469275, "acc_stderr": 0.014508979453553977, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553977},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3786764705882353, "acc_stderr": 0.029465133639776125, "acc_norm": 0.3786764705882353, "acc_norm_stderr": 0.029465133639776125},
        "harness|ko_mmlu_security_studies|5": {"acc": 0.3510204081632653, "acc_stderr": 0.03055531675557364, "acc_norm": 0.3510204081632653, "acc_norm_stderr": 0.03055531675557364},
        "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.4767932489451477, "acc_stderr": 0.032512152011410174, "acc_norm": 0.4767932489451477, "acc_norm_stderr": 0.032512152011410174},
        "harness|ko_mmlu_professional_law|5": {"acc": 0.3089960886571056, "acc_stderr": 0.01180172977723925, "acc_norm": 0.3089960886571056, "acc_norm_stderr": 0.01180172977723925},
        "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.31862745098039214, "acc_stderr": 0.032702871814820816, "acc_norm": 0.31862745098039214, "acc_norm_stderr": 0.032702871814820816},
        "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.3212121212121212, "acc_stderr": 0.0364620496325381, "acc_norm": 0.3212121212121212, "acc_norm_stderr": 0.0364620496325381},
        "harness|ko_truthfulqa_mc|0": {"mc1": 0.25091799265605874, "mc1_stderr": 0.01517698502770769, "mc2": 0.38056097212603235, "mc2_stderr": 0.014936929596682727},
        "harness|ko_commongen_v2|2": {"acc": 0.5704225352112676, "acc_stderr": 0.01696892392010678, "acc_norm": 0.6549295774647887, "acc_norm_stderr": 0.016296201644718785}
    },
    "versions": {
        "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "42MARU/llama-2-ko-7b-instruction-v3",
        "model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json
DELETED
@@ -1,444 +0,0 @@
{
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.3046075085324232, "acc_stderr": 0.013449522109932492, "acc_norm": 0.363481228668942, "acc_norm_stderr": 0.014056207319068282},
        "harness|ko_hellaswag|10": {"acc": 0.3981278629755029, "acc_stderr": 0.0048851164655502755, "acc_norm": 0.5159330810595499, "acc_norm_stderr": 0.004987247325495624},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.22807017543859648, "acc_stderr": 0.03218093795602357, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03218093795602357},
        "harness|ko_mmlu_management|5": {"acc": 0.24271844660194175, "acc_stderr": 0.04245022486384495, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384495},
        "harness|ko_mmlu_miscellaneous|5": {"acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193},
        "harness|ko_mmlu_anatomy|5": {"acc": 0.28888888888888886, "acc_stderr": 0.039154506304142495, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.039154506304142495},
        "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
        "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.2297872340425532, "acc_stderr": 0.02750175294441242, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.02750175294441242},
        "harness|ko_mmlu_virology|5": {"acc": 0.2469879518072289, "acc_stderr": 0.03357351982064536, "acc_norm": 0.2469879518072289, "acc_norm_stderr": 0.03357351982064536},
        "harness|ko_mmlu_philosophy|5": {"acc": 0.33440514469453375, "acc_stderr": 0.026795422327893944, "acc_norm": 0.33440514469453375, "acc_norm_stderr": 0.026795422327893944},
        "harness|ko_mmlu_human_aging|5": {"acc": 0.1031390134529148, "acc_stderr": 0.020412564289839272, "acc_norm": 0.1031390134529148, "acc_norm_stderr": 0.020412564289839272},
        "harness|ko_mmlu_human_sexuality|5": {"acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354},
        "harness|ko_mmlu_medical_genetics|5": {"acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391},
        "harness|ko_mmlu_high_school_geography|5": {"acc": 0.40404040404040403, "acc_stderr": 0.03496130972056128, "acc_norm": 0.40404040404040403, "acc_norm_stderr": 0.03496130972056128},
        "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3310344827586207, "acc_stderr": 0.03921545312467122, "acc_norm": 0.3310344827586207, "acc_norm_stderr": 0.03921545312467122},
        "harness|ko_mmlu_college_physics|5": {"acc": 0.2647058823529412, "acc_stderr": 0.04389869956808777, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808777},
        "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.18907563025210083, "acc_stderr": 0.02543511943810536, "acc_norm": 0.18907563025210083, "acc_norm_stderr": 0.02543511943810536},
        "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.21794871794871795, "acc_stderr": 0.020932445774463175, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.020932445774463175},
        "harness|ko_mmlu_computer_security|5": {"acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694},
        "harness|ko_mmlu_global_facts|5": {"acc": 0.16, "acc_stderr": 0.03684529491774708, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774708},
        "harness|ko_mmlu_jurisprudence|5": {"acc": 0.26851851851851855, "acc_stderr": 0.04284467968052191, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052191},
        "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.1921182266009852, "acc_stderr": 0.027719315709614778, "acc_norm": 0.1921182266009852, "acc_norm_stderr": 0.027719315709614778},
        "harness|ko_mmlu_high_school_biology|5": {"acc": 0.25483870967741934, "acc_stderr": 0.024790118459332204, "acc_norm": 0.25483870967741934, "acc_norm_stderr": 0.024790118459332204},
        "harness|ko_mmlu_marketing|5": {"acc": 0.2905982905982906, "acc_stderr": 0.029745048572674057, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.029745048572674057},
        "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.29056603773584905, "acc_stderr": 0.027943219989337156, "acc_norm": 0.29056603773584905, "acc_norm_stderr": 0.027943219989337156},
        "harness|ko_mmlu_public_relations|5": {"acc": 0.2636363636363636, "acc_stderr": 0.04220224692971987, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987},
        "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166},
        "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2913907284768212, "acc_stderr": 0.03710185726119995, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119995},
        "harness|ko_mmlu_sociology|5": {"acc": 0.19402985074626866, "acc_stderr": 0.027962677604768914, "acc_norm": 0.19402985074626866, "acc_norm_stderr": 0.027962677604768914},
        "harness|ko_mmlu_college_medicine|5": {"acc": 0.2254335260115607, "acc_stderr": 0.03186209851641143, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641143},
        "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.24867724867724866, "acc_stderr": 0.02226181769240018, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.02226181769240018},
        "harness|ko_mmlu_college_biology|5": {"acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644},
        "harness|ko_mmlu_college_chemistry|5": {"acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326},
        "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
        "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2774566473988439, "acc_stderr": 0.024105712607754307, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.024105712607754307},
        "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474},
        "harness|ko_mmlu_prehistory|5": {"acc": 0.24074074074074073, "acc_stderr": 0.023788583551658537, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.023788583551658537},
        "harness|ko_mmlu_college_mathematics|5": {"acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077},
        "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.22797927461139897, "acc_stderr": 0.030276909945178253, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178253},
        "harness|ko_mmlu_econometrics|5": {"acc": 0.30701754385964913, "acc_stderr": 0.043391383225798594, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.043391383225798594},
        "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.3376146788990826, "acc_stderr": 0.02027526598663891, "acc_norm": 0.3376146788990826, "acc_norm_stderr": 0.02027526598663891},
        "harness|ko_mmlu_formal_logic|5": {"acc": 0.23809523809523808, "acc_stderr": 0.038095238095238126, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238126},
        "harness|ko_mmlu_nutrition|5": {"acc": 0.27450980392156865, "acc_stderr": 0.025553169991826524, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.025553169991826524},
        "harness|ko_mmlu_business_ethics|5": {"acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505},
        "harness|ko_mmlu_international_law|5": {"acc": 0.24793388429752067, "acc_stderr": 0.03941897526516302, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.03941897526516302},
        "harness|ko_mmlu_astronomy|5": {"acc": 0.23684210526315788, "acc_stderr": 0.03459777606810537, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03459777606810537},
        "harness|ko_mmlu_professional_psychology|5": {"acc": 0.25980392156862747, "acc_stderr": 0.01774089950917779, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.01774089950917779},
        "harness|ko_mmlu_professional_accounting|5": {"acc": 0.22340425531914893, "acc_stderr": 0.024847921358063962, "acc_norm": 0.22340425531914893, "acc_norm_stderr": 0.024847921358063962},
        "harness|ko_mmlu_machine_learning|5": {"acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629},
        "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.24537037037037038, "acc_stderr": 0.029346665094372937, "acc_norm": 0.24537037037037038, "acc_norm_stderr": 0.029346665094372937},
        "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608},
        "harness|ko_mmlu_college_computer_science|5": {"acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034},
        "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
        "harness|ko_mmlu_professional_medicine|5": {"acc": 0.3786764705882353, "acc_stderr": 0.02946513363977613,
|
324 |
-
"acc_norm": 0.3786764705882353,
|
325 |
-
"acc_norm_stderr": 0.02946513363977613
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.35918367346938773,
|
329 |
-
"acc_stderr": 0.03071356045510849,
|
330 |
-
"acc_norm": 0.35918367346938773,
|
331 |
-
"acc_norm_stderr": 0.03071356045510849
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.24472573839662448,
|
335 |
-
"acc_stderr": 0.027985699387036416,
|
336 |
-
"acc_norm": 0.24472573839662448,
|
337 |
-
"acc_norm_stderr": 0.027985699387036416
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2588005215123859,
|
341 |
-
"acc_stderr": 0.011186109046564608,
|
342 |
-
"acc_norm": 0.2588005215123859,
|
343 |
-
"acc_norm_stderr": 0.011186109046564608
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.28431372549019607,
|
347 |
-
"acc_stderr": 0.031660096793998116,
|
348 |
-
"acc_norm": 0.28431372549019607,
|
349 |
-
"acc_norm_stderr": 0.031660096793998116
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.2,
|
353 |
-
"acc_stderr": 0.03123475237772118,
|
354 |
-
"acc_norm": 0.2,
|
355 |
-
"acc_norm_stderr": 0.03123475237772118
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.27906976744186046,
|
359 |
-
"mc1_stderr": 0.015702107090627887,
|
360 |
-
"mc2": 0.4515720476496737,
|
361 |
-
"mc2_stderr": 0.015493161984611252
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5528169014084507,
|
365 |
-
"acc_stderr": 0.017043883876215398,
|
366 |
-
"acc_norm": 0.5997652582159625,
|
367 |
-
"acc_norm_stderr": 0.016795125938543782
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "42MARU/polyglot-ko-12.8b-instruct",
|
436 |
-
"model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.3643344709897611,
|
5 |
-
"acc_stderr": 0.014063260279882417,
|
6 |
-
"acc_norm": 0.4112627986348123,
|
7 |
-
"acc_norm_stderr": 0.014379441068522084
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3732324238199562,
|
11 |
-
"acc_stderr": 0.004826746160830189,
|
12 |
-
"acc_norm": 0.4751045608444533,
|
13 |
-
"acc_norm_stderr": 0.004983592410934169
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.4853801169590643,
|
17 |
-
"acc_stderr": 0.038331852752130205,
|
18 |
-
"acc_norm": 0.4853801169590643,
|
19 |
-
"acc_norm_stderr": 0.038331852752130205
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.5631067961165048,
|
23 |
-
"acc_stderr": 0.04911147107365777,
|
24 |
-
"acc_norm": 0.5631067961165048,
|
25 |
-
"acc_norm_stderr": 0.04911147107365777
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.4878671775223499,
|
29 |
-
"acc_stderr": 0.017874698667491355,
|
30 |
-
"acc_norm": 0.4878671775223499,
|
31 |
-
"acc_norm_stderr": 0.017874698667491355
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.4148148148148148,
|
35 |
-
"acc_stderr": 0.042561937679014075,
|
36 |
-
"acc_norm": 0.4148148148148148,
|
37 |
-
"acc_norm_stderr": 0.042561937679014075
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.3,
|
41 |
-
"acc_stderr": 0.046056618647183814,
|
42 |
-
"acc_norm": 0.3,
|
43 |
-
"acc_norm_stderr": 0.046056618647183814
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.4,
|
47 |
-
"acc_stderr": 0.03202563076101735,
|
48 |
-
"acc_norm": 0.4,
|
49 |
-
"acc_norm_stderr": 0.03202563076101735
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.3855421686746988,
|
53 |
-
"acc_stderr": 0.03789134424611548,
|
54 |
-
"acc_norm": 0.3855421686746988,
|
55 |
-
"acc_norm_stderr": 0.03789134424611548
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.4533762057877814,
|
59 |
-
"acc_stderr": 0.028274359854894245,
|
60 |
-
"acc_norm": 0.4533762057877814,
|
61 |
-
"acc_norm_stderr": 0.028274359854894245
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.4663677130044843,
|
65 |
-
"acc_stderr": 0.033481800170603065,
|
66 |
-
"acc_norm": 0.4663677130044843,
|
67 |
-
"acc_norm_stderr": 0.033481800170603065
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.48091603053435117,
|
71 |
-
"acc_stderr": 0.04382094705550988,
|
72 |
-
"acc_norm": 0.48091603053435117,
|
73 |
-
"acc_norm_stderr": 0.04382094705550988
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.42,
|
77 |
-
"acc_stderr": 0.04960449637488583,
|
78 |
-
"acc_norm": 0.42,
|
79 |
-
"acc_norm_stderr": 0.04960449637488583
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.5909090909090909,
|
83 |
-
"acc_stderr": 0.03502975799413008,
|
84 |
-
"acc_norm": 0.5909090909090909,
|
85 |
-
"acc_norm_stderr": 0.03502975799413008
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.4413793103448276,
|
89 |
-
"acc_stderr": 0.04137931034482758,
|
90 |
-
"acc_norm": 0.4413793103448276,
|
91 |
-
"acc_norm_stderr": 0.04137931034482758
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.16666666666666666,
|
95 |
-
"acc_stderr": 0.03708284662416544,
|
96 |
-
"acc_norm": 0.16666666666666666,
|
97 |
-
"acc_norm_stderr": 0.03708284662416544
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.4495798319327731,
|
101 |
-
"acc_stderr": 0.03231293497137707,
|
102 |
-
"acc_norm": 0.4495798319327731,
|
103 |
-
"acc_norm_stderr": 0.03231293497137707
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.4358974358974359,
|
107 |
-
"acc_stderr": 0.025141801511177498,
|
108 |
-
"acc_norm": 0.4358974358974359,
|
109 |
-
"acc_norm_stderr": 0.025141801511177498
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.46,
|
113 |
-
"acc_stderr": 0.05009082659620333,
|
114 |
-
"acc_norm": 0.46,
|
115 |
-
"acc_norm_stderr": 0.05009082659620333
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.42,
|
119 |
-
"acc_stderr": 0.049604496374885836,
|
120 |
-
"acc_norm": 0.42,
|
121 |
-
"acc_norm_stderr": 0.049604496374885836
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.5277777777777778,
|
125 |
-
"acc_stderr": 0.048262172941398944,
|
126 |
-
"acc_norm": 0.5277777777777778,
|
127 |
-
"acc_norm_stderr": 0.048262172941398944
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.3645320197044335,
|
131 |
-
"acc_stderr": 0.0338640574606209,
|
132 |
-
"acc_norm": 0.3645320197044335,
|
133 |
-
"acc_norm_stderr": 0.0338640574606209
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.47096774193548385,
|
137 |
-
"acc_stderr": 0.028396016402761005,
|
138 |
-
"acc_norm": 0.47096774193548385,
|
139 |
-
"acc_norm_stderr": 0.028396016402761005
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.6282051282051282,
|
143 |
-
"acc_stderr": 0.03166098891888078,
|
144 |
-
"acc_norm": 0.6282051282051282,
|
145 |
-
"acc_norm_stderr": 0.03166098891888078
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.4528301886792453,
|
149 |
-
"acc_stderr": 0.030635627957961823,
|
150 |
-
"acc_norm": 0.4528301886792453,
|
151 |
-
"acc_norm_stderr": 0.030635627957961823
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.509090909090909,
|
155 |
-
"acc_stderr": 0.04788339768702861,
|
156 |
-
"acc_norm": 0.509090909090909,
|
157 |
-
"acc_norm_stderr": 0.04788339768702861
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.3111111111111111,
|
161 |
-
"acc_stderr": 0.028226446749683526,
|
162 |
-
"acc_norm": 0.3111111111111111,
|
163 |
-
"acc_norm_stderr": 0.028226446749683526
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.31788079470198677,
|
167 |
-
"acc_stderr": 0.038020397601079024,
|
168 |
-
"acc_norm": 0.31788079470198677,
|
169 |
-
"acc_norm_stderr": 0.038020397601079024
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.5771144278606966,
|
173 |
-
"acc_stderr": 0.034932317774212816,
|
174 |
-
"acc_norm": 0.5771144278606966,
|
175 |
-
"acc_norm_stderr": 0.034932317774212816
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3815028901734104,
|
179 |
-
"acc_stderr": 0.03703851193099521,
|
180 |
-
"acc_norm": 0.3815028901734104,
|
181 |
-
"acc_norm_stderr": 0.03703851193099521
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.3253968253968254,
|
185 |
-
"acc_stderr": 0.024130158299762613,
|
186 |
-
"acc_norm": 0.3253968253968254,
|
187 |
-
"acc_norm_stderr": 0.024130158299762613
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3819444444444444,
|
191 |
-
"acc_stderr": 0.040629907841466674,
|
192 |
-
"acc_norm": 0.3819444444444444,
|
193 |
-
"acc_norm_stderr": 0.040629907841466674
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.26,
|
197 |
-
"acc_stderr": 0.04408440022768079,
|
198 |
-
"acc_norm": 0.26,
|
199 |
-
"acc_norm_stderr": 0.04408440022768079
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.6,
|
203 |
-
"acc_stderr": 0.049236596391733084,
|
204 |
-
"acc_norm": 0.6,
|
205 |
-
"acc_norm_stderr": 0.049236596391733084
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.5144508670520231,
|
209 |
-
"acc_stderr": 0.02690784985628254,
|
210 |
-
"acc_norm": 0.5144508670520231,
|
211 |
-
"acc_norm_stderr": 0.02690784985628254
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.44171779141104295,
|
215 |
-
"acc_stderr": 0.03901591825836184,
|
216 |
-
"acc_norm": 0.44171779141104295,
|
217 |
-
"acc_norm_stderr": 0.03901591825836184
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.4444444444444444,
|
221 |
-
"acc_stderr": 0.02764847787741332,
|
222 |
-
"acc_norm": 0.4444444444444444,
|
223 |
-
"acc_norm_stderr": 0.02764847787741332
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.35,
|
227 |
-
"acc_stderr": 0.0479372485441102,
|
228 |
-
"acc_norm": 0.35,
|
229 |
-
"acc_norm_stderr": 0.0479372485441102
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.5077720207253886,
|
233 |
-
"acc_stderr": 0.03608003225569654,
|
234 |
-
"acc_norm": 0.5077720207253886,
|
235 |
-
"acc_norm_stderr": 0.03608003225569654
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.19298245614035087,
|
239 |
-
"acc_stderr": 0.037124548537213684,
|
240 |
-
"acc_norm": 0.19298245614035087,
|
241 |
-
"acc_norm_stderr": 0.037124548537213684
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.47889908256880737,
|
245 |
-
"acc_stderr": 0.021418224754264643,
|
246 |
-
"acc_norm": 0.47889908256880737,
|
247 |
-
"acc_norm_stderr": 0.021418224754264643
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.373015873015873,
|
251 |
-
"acc_stderr": 0.04325506042017086,
|
252 |
-
"acc_norm": 0.373015873015873,
|
253 |
-
"acc_norm_stderr": 0.04325506042017086
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.43790849673202614,
|
257 |
-
"acc_stderr": 0.028408302020332687,
|
258 |
-
"acc_norm": 0.43790849673202614,
|
259 |
-
"acc_norm_stderr": 0.028408302020332687
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.42,
|
263 |
-
"acc_stderr": 0.04960449637488584,
|
264 |
-
"acc_norm": 0.42,
|
265 |
-
"acc_norm_stderr": 0.04960449637488584
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.6859504132231405,
|
269 |
-
"acc_stderr": 0.042369647530410184,
|
270 |
-
"acc_norm": 0.6859504132231405,
|
271 |
-
"acc_norm_stderr": 0.042369647530410184
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.34868421052631576,
|
275 |
-
"acc_stderr": 0.03878139888797609,
|
276 |
-
"acc_norm": 0.34868421052631576,
|
277 |
-
"acc_norm_stderr": 0.03878139888797609
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3480392156862745,
|
281 |
-
"acc_stderr": 0.01927099870822398,
|
282 |
-
"acc_norm": 0.3480392156862745,
|
283 |
-
"acc_norm_stderr": 0.01927099870822398
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3475177304964539,
|
287 |
-
"acc_stderr": 0.028406627809590947,
|
288 |
-
"acc_norm": 0.3475177304964539,
|
289 |
-
"acc_norm_stderr": 0.028406627809590947
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.23214285714285715,
|
293 |
-
"acc_stderr": 0.040073418097558065,
|
294 |
-
"acc_norm": 0.23214285714285715,
|
295 |
-
"acc_norm_stderr": 0.040073418097558065
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3101851851851852,
|
299 |
-
"acc_stderr": 0.031546962856566295,
|
300 |
-
"acc_norm": 0.3101851851851852,
|
301 |
-
"acc_norm_stderr": 0.031546962856566295
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.3106145251396648,
|
305 |
-
"acc_stderr": 0.015476515438005566,
|
306 |
-
"acc_norm": 0.3106145251396648,
|
307 |
-
"acc_norm_stderr": 0.015476515438005566
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.41,
|
311 |
-
"acc_stderr": 0.049431107042371025,
|
312 |
-
"acc_norm": 0.41,
|
313 |
-
"acc_norm_stderr": 0.049431107042371025
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.44,
|
317 |
-
"acc_stderr": 0.04988876515698589,
|
318 |
-
"acc_norm": 0.44,
|
319 |
-
"acc_norm_stderr": 0.04988876515698589
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.3235294117647059,
|
323 |
-
"acc_stderr": 0.02841820861940679,
|
324 |
-
"acc_norm": 0.3235294117647059,
|
325 |
-
"acc_norm_stderr": 0.02841820861940679
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.5428571428571428,
|
329 |
-
"acc_stderr": 0.03189141832421396,
|
330 |
-
"acc_norm": 0.5428571428571428,
|
331 |
-
"acc_norm_stderr": 0.03189141832421396
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5780590717299579,
|
335 |
-
"acc_stderr": 0.032148146302403695,
|
336 |
-
"acc_norm": 0.5780590717299579,
|
337 |
-
"acc_norm_stderr": 0.032148146302403695
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.32333767926988266,
|
341 |
-
"acc_stderr": 0.011946565758447202,
|
342 |
-
"acc_norm": 0.32333767926988266,
|
343 |
-
"acc_norm_stderr": 0.011946565758447202
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.47549019607843135,
|
347 |
-
"acc_stderr": 0.035050931943487976,
|
348 |
-
"acc_norm": 0.47549019607843135,
|
349 |
-
"acc_norm_stderr": 0.035050931943487976
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.5393939393939394,
|
353 |
-
"acc_stderr": 0.03892207016552012,
|
354 |
-
"acc_norm": 0.5393939393939394,
|
355 |
-
"acc_norm_stderr": 0.03892207016552012
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.35006119951040393,
|
359 |
-
"mc1_stderr": 0.01669794942015103,
|
360 |
-
"mc2": 0.5148844380994511,
|
361 |
-
"mc2_stderr": 0.015947695748354234
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.2136150234741784,
|
365 |
-
"acc_stderr": 0.014049754012186298,
|
366 |
-
"acc_norm": 0.22769953051643194,
|
367 |
-
"acc_norm_stderr": 0.014375052416765484
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "42MARU/sitebunny-13b",
|
436 |
-
"model_sha": "67107327d09c2f9bf3e4b316d97767c97f5a0804",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.2636518771331058,
|
5 |
-
"acc_stderr": 0.01287592915129705,
|
6 |
-
"acc_norm": 0.32593856655290104,
|
7 |
-
"acc_norm_stderr": 0.013697432466693242
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3563035251941844,
|
11 |
-
"acc_stderr": 0.004779276329704052,
|
12 |
-
"acc_norm": 0.4473212507468632,
|
13 |
-
"acc_norm_stderr": 0.004962010338226348
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.23976608187134502,
|
17 |
-
"acc_stderr": 0.03274485211946956,
|
18 |
-
"acc_norm": 0.23976608187134502,
|
19 |
-
"acc_norm_stderr": 0.03274485211946956
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.1941747572815534,
|
23 |
-
"acc_stderr": 0.03916667762822584,
|
24 |
-
"acc_norm": 0.1941747572815534,
|
25 |
-
"acc_norm_stderr": 0.03916667762822584
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.22094508301404853,
|
29 |
-
"acc_stderr": 0.014836205167333574,
|
30 |
-
"acc_norm": 0.22094508301404853,
|
31 |
-
"acc_norm_stderr": 0.014836205167333574
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.18518518518518517,
|
35 |
-
"acc_stderr": 0.0335567721631314,
|
36 |
-
"acc_norm": 0.18518518518518517,
|
37 |
-
"acc_norm_stderr": 0.0335567721631314
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.27,
|
41 |
-
"acc_stderr": 0.04461960433384741,
|
42 |
-
"acc_norm": 0.27,
|
43 |
-
"acc_norm_stderr": 0.04461960433384741
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2851063829787234,
|
47 |
-
"acc_stderr": 0.02951319662553935,
|
48 |
-
"acc_norm": 0.2851063829787234,
|
49 |
-
"acc_norm_stderr": 0.02951319662553935
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.3072289156626506,
|
53 |
-
"acc_stderr": 0.03591566797824664,
|
54 |
-
"acc_norm": 0.3072289156626506,
|
55 |
-
"acc_norm_stderr": 0.03591566797824664
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2347266881028939,
|
59 |
-
"acc_stderr": 0.024071805887677045,
|
60 |
-
"acc_norm": 0.2347266881028939,
|
61 |
-
"acc_norm_stderr": 0.024071805887677045
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.2242152466367713,
|
65 |
-
"acc_stderr": 0.027991534258519527,
|
66 |
-
"acc_norm": 0.2242152466367713,
|
67 |
-
"acc_norm_stderr": 0.027991534258519527
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2900763358778626,
|
71 |
-
"acc_stderr": 0.03980066246467765,
|
72 |
-
"acc_norm": 0.2900763358778626,
|
73 |
-
"acc_norm_stderr": 0.03980066246467765
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.28,
|
77 |
-
"acc_stderr": 0.04512608598542127,
|
78 |
-
"acc_norm": 0.28,
|
79 |
-
"acc_norm_stderr": 0.04512608598542127
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.2474747474747475,
|
83 |
-
"acc_stderr": 0.030746300742124484,
|
84 |
-
"acc_norm": 0.2474747474747475,
|
85 |
-
"acc_norm_stderr": 0.030746300742124484
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.21379310344827587,
|
89 |
-
"acc_stderr": 0.034165204477475494,
|
90 |
-
"acc_norm": 0.21379310344827587,
|
91 |
-
"acc_norm_stderr": 0.034165204477475494
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.22549019607843138,
|
95 |
-
"acc_stderr": 0.041583075330832865,
|
96 |
-
"acc_norm": 0.22549019607843138,
|
97 |
-
"acc_norm_stderr": 0.041583075330832865
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3235294117647059,
|
101 |
-
"acc_stderr": 0.030388353551886835,
|
102 |
-
"acc_norm": 0.3235294117647059,
|
103 |
-
"acc_norm_stderr": 0.030388353551886835
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.36153846153846153,
|
107 |
-
"acc_stderr": 0.02435958146539698,
|
108 |
-
"acc_norm": 0.36153846153846153,
|
109 |
-
"acc_norm_stderr": 0.02435958146539698
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.17,
|
113 |
-
"acc_stderr": 0.03775251680686371,
|
114 |
-
"acc_norm": 0.17,
|
115 |
-
"acc_norm_stderr": 0.03775251680686371
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.34,
|
119 |
-
"acc_stderr": 0.04760952285695235,
|
120 |
-
"acc_norm": 0.34,
|
121 |
-
"acc_norm_stderr": 0.04760952285695235
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.2037037037037037,
|
125 |
-
"acc_stderr": 0.038935425188248475,
|
126 |
-
"acc_norm": 0.2037037037037037,
|
127 |
-
"acc_norm_stderr": 0.038935425188248475
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.270935960591133,
|
131 |
-
"acc_stderr": 0.031270907132976984,
|
132 |
-
"acc_norm": 0.270935960591133,
|
133 |
-
"acc_norm_stderr": 0.031270907132976984
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.2870967741935484,
|
137 |
-
"acc_stderr": 0.025736542745594525,
|
138 |
-
"acc_norm": 0.2870967741935484,
|
139 |
-
"acc_norm_stderr": 0.025736542745594525
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.20085470085470086,
|
143 |
-
"acc_stderr": 0.02624677294689047,
|
144 |
-
"acc_norm": 0.20085470085470086,
|
145 |
-
"acc_norm_stderr": 0.02624677294689047
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.25660377358490566,
|
149 |
-
"acc_stderr": 0.026880647889051985,
|
150 |
-
"acc_norm": 0.25660377358490566,
|
151 |
-
"acc_norm_stderr": 0.026880647889051985
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.2636363636363636,
|
155 |
-
"acc_stderr": 0.04220224692971987,
|
156 |
-
"acc_norm": 0.2636363636363636,
|
157 |
-
"acc_norm_stderr": 0.04220224692971987
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.24814814814814815,
|
161 |
-
"acc_stderr": 0.0263357394040558,
|
162 |
-
"acc_norm": 0.24814814814814815,
|
163 |
-
"acc_norm_stderr": 0.0263357394040558
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.3509933774834437,
|
167 |
-
"acc_stderr": 0.03896981964257374,
|
168 |
-
"acc_norm": 0.3509933774834437,
|
169 |
-
"acc_norm_stderr": 0.03896981964257374
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.2935323383084577,
|
173 |
-
"acc_stderr": 0.03220024104534205,
|
174 |
-
"acc_norm": 0.2935323383084577,
|
175 |
-
"acc_norm_stderr": 0.03220024104534205
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.26011560693641617,
|
179 |
-
"acc_stderr": 0.03345036916788991,
|
180 |
-
"acc_norm": 0.26011560693641617,
|
181 |
-
"acc_norm_stderr": 0.03345036916788991
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.24338624338624337,
|
185 |
-
"acc_stderr": 0.022101128787415426,
|
186 |
-
"acc_norm": 0.24338624338624337,
|
187 |
-
"acc_norm_stderr": 0.022101128787415426
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2569444444444444,
|
191 |
-
"acc_stderr": 0.03653946969442099,
|
192 |
-
"acc_norm": 0.2569444444444444,
|
193 |
-
"acc_norm_stderr": 0.03653946969442099
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.26,
|
197 |
-
"acc_stderr": 0.0440844002276808,
|
198 |
-
"acc_norm": 0.26,
|
199 |
-
"acc_norm_stderr": 0.0440844002276808
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.25,
|
203 |
-
"acc_stderr": 0.04351941398892446,
|
204 |
-
"acc_norm": 0.25,
|
205 |
-
"acc_norm_stderr": 0.04351941398892446
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.2254335260115607,
|
209 |
-
"acc_stderr": 0.022497230190967547,
|
210 |
-
"acc_norm": 0.2254335260115607,
|
211 |
-
"acc_norm_stderr": 0.022497230190967547
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.26993865030674846,
|
215 |
-
"acc_stderr": 0.034878251684978906,
|
216 |
-
"acc_norm": 0.26993865030674846,
|
217 |
-
"acc_norm_stderr": 0.034878251684978906
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.28703703703703703,
|
221 |
-
"acc_stderr": 0.025171041915309684,
|
222 |
-
"acc_norm": 0.28703703703703703,
|
223 |
-
"acc_norm_stderr": 0.025171041915309684
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.3,
|
227 |
-
"acc_stderr": 0.046056618647183814,
|
228 |
-
"acc_norm": 0.3,
|
229 |
-
"acc_norm_stderr": 0.046056618647183814
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.35751295336787564,
|
233 |
-
"acc_stderr": 0.03458816042181006,
|
234 |
-
"acc_norm": 0.35751295336787564,
|
235 |
-
"acc_norm_stderr": 0.03458816042181006
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.30701754385964913,
|
239 |
-
"acc_stderr": 0.043391383225798594,
|
240 |
-
"acc_norm": 0.30701754385964913,
|
241 |
-
"acc_norm_stderr": 0.043391383225798594
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.24954128440366974,
|
245 |
-
"acc_stderr": 0.018553897629501614,
|
246 |
-
"acc_norm": 0.24954128440366974,
|
247 |
-
"acc_norm_stderr": 0.018553897629501614
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.373015873015873,
|
251 |
-
"acc_stderr": 0.04325506042017086,
|
252 |
-
"acc_norm": 0.373015873015873,
|
253 |
-
"acc_norm_stderr": 0.04325506042017086
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.24183006535947713,
|
257 |
-
"acc_stderr": 0.024518195641879334,
|
258 |
-
"acc_norm": 0.24183006535947713,
|
259 |
-
"acc_norm_stderr": 0.024518195641879334
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.2,
|
263 |
-
"acc_stderr": 0.04020151261036846,
|
264 |
-
"acc_norm": 0.2,
|
265 |
-
"acc_norm_stderr": 0.04020151261036846
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.38016528925619836,
|
269 |
-
"acc_stderr": 0.04431324501968432,
|
270 |
-
"acc_norm": 0.38016528925619836,
|
271 |
-
"acc_norm_stderr": 0.04431324501968432
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.21710526315789475,
|
275 |
-
"acc_stderr": 0.03355045304882924,
|
276 |
-
"acc_norm": 0.21710526315789475,
|
277 |
-
"acc_norm_stderr": 0.03355045304882924
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.2549019607843137,
|
281 |
-
"acc_stderr": 0.017630827375148383,
|
282 |
-
"acc_norm": 0.2549019607843137,
|
283 |
-
"acc_norm_stderr": 0.017630827375148383
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.26595744680851063,
|
287 |
-
"acc_stderr": 0.026358065698880592,
|
288 |
-
"acc_norm": 0.26595744680851063,
|
289 |
-
"acc_norm_stderr": 0.026358065698880592
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25892857142857145,
|
293 |
-
"acc_stderr": 0.041577515398656284,
|
294 |
-
"acc_norm": 0.25892857142857145,
|
295 |
-
"acc_norm_stderr": 0.041577515398656284
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4722222222222222,
|
299 |
-
"acc_stderr": 0.0340470532865388,
|
300 |
-
"acc_norm": 0.4722222222222222,
|
301 |
-
"acc_norm_stderr": 0.0340470532865388
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.27262569832402234,
|
305 |
-
"acc_stderr": 0.014893391735249608,
|
306 |
-
"acc_norm": 0.27262569832402234,
|
307 |
-
"acc_norm_stderr": 0.014893391735249608
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.31,
|
311 |
-
"acc_stderr": 0.04648231987117316,
|
312 |
-
"acc_norm": 0.31,
|
313 |
-
"acc_norm_stderr": 0.04648231987117316
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.23,
|
317 |
-
"acc_stderr": 0.04229525846816505,
|
318 |
-
"acc_norm": 0.23,
|
319 |
-
"acc_norm_stderr": 0.04229525846816505
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.4522058823529412,
|
323 |
-
"acc_stderr": 0.030233758551596452,
|
324 |
-
"acc_norm": 0.4522058823529412,
|
325 |
-
"acc_norm_stderr": 0.030233758551596452
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.27346938775510204,
|
329 |
-
"acc_stderr": 0.02853556033712845,
|
330 |
-
"acc_norm": 0.27346938775510204,
|
331 |
-
"acc_norm_stderr": 0.02853556033712845
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.26582278481012656,
|
335 |
-
"acc_stderr": 0.02875679962965833,
|
336 |
-
"acc_norm": 0.26582278481012656,
|
337 |
-
"acc_norm_stderr": 0.02875679962965833
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2561929595827901,
|
341 |
-
"acc_stderr": 0.011149173153110583,
|
342 |
-
"acc_norm": 0.2561929595827901,
|
343 |
-
"acc_norm_stderr": 0.011149173153110583
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.24019607843137256,
|
347 |
-
"acc_stderr": 0.02998373305591361,
|
348 |
-
"acc_norm": 0.24019607843137256,
|
349 |
-
"acc_norm_stderr": 0.02998373305591361
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.24242424242424243,
|
353 |
-
"acc_stderr": 0.03346409881055953,
|
354 |
-
"acc_norm": 0.24242424242424243,
|
355 |
-
"acc_norm_stderr": 0.03346409881055953
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24479804161566707,
|
359 |
-
"mc1_stderr": 0.015051869486715006,
|
360 |
-
"mc2": 0.40367736123530334,
|
361 |
-
"mc2_stderr": 0.014824402657107816
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.2992957746478873,
|
365 |
-
"acc_stderr": 0.015698309276204924,
|
366 |
-
"acc_norm": 0.3591549295774648,
|
367 |
-
"acc_norm_stderr": 0.016445711213506745
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "42dot/42dot_LLM-PLM-1.3B",
|
436 |
-
"model_sha": "a72bf57eb02cd4ea4388a344b4a5893aa95698da",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.28242320819112626,
|
5 |
-
"acc_stderr": 0.01315545688409722,
|
6 |
-
"acc_norm": 0.35494880546075086,
|
7 |
-
"acc_norm_stderr": 0.013983036904094094
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.36317466640111534,
|
11 |
-
"acc_stderr": 0.004799317209902023,
|
12 |
-
"acc_norm": 0.4613622784305915,
|
13 |
-
"acc_norm_stderr": 0.004974860878464429
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.32748538011695905,
|
17 |
-
"acc_stderr": 0.035993357714560276,
|
18 |
-
"acc_norm": 0.32748538011695905,
|
19 |
-
"acc_norm_stderr": 0.035993357714560276
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.13592233009708737,
|
23 |
-
"acc_stderr": 0.033932957297610124,
|
24 |
-
"acc_norm": 0.13592233009708737,
|
25 |
-
"acc_norm_stderr": 0.033932957297610124
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.23754789272030652,
|
29 |
-
"acc_stderr": 0.015218733046150193,
|
30 |
-
"acc_norm": 0.23754789272030652,
|
31 |
-
"acc_norm_stderr": 0.015218733046150193
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.23703703703703705,
|
35 |
-
"acc_stderr": 0.03673731683969506,
|
36 |
-
"acc_norm": 0.23703703703703705,
|
37 |
-
"acc_norm_stderr": 0.03673731683969506
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.28,
|
41 |
-
"acc_stderr": 0.04512608598542127,
|
42 |
-
"acc_norm": 0.28,
|
43 |
-
"acc_norm_stderr": 0.04512608598542127
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2723404255319149,
|
47 |
-
"acc_stderr": 0.029101290698386698,
|
48 |
-
"acc_norm": 0.2723404255319149,
|
49 |
-
"acc_norm_stderr": 0.029101290698386698
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.2710843373493976,
|
53 |
-
"acc_stderr": 0.034605799075530276,
|
54 |
-
"acc_norm": 0.2710843373493976,
|
55 |
-
"acc_norm_stderr": 0.034605799075530276
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2604501607717042,
|
59 |
-
"acc_stderr": 0.024926723224845543,
|
60 |
-
"acc_norm": 0.2604501607717042,
|
61 |
-
"acc_norm_stderr": 0.024926723224845543
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.242152466367713,
|
65 |
-
"acc_stderr": 0.028751392398694755,
|
66 |
-
"acc_norm": 0.242152466367713,
|
67 |
-
"acc_norm_stderr": 0.028751392398694755
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2366412213740458,
|
71 |
-
"acc_stderr": 0.03727673575596918,
|
72 |
-
"acc_norm": 0.2366412213740458,
|
73 |
-
"acc_norm_stderr": 0.03727673575596918
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.27,
|
77 |
-
"acc_stderr": 0.0446196043338474,
|
78 |
-
"acc_norm": 0.27,
|
79 |
-
"acc_norm_stderr": 0.0446196043338474
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.22727272727272727,
|
83 |
-
"acc_stderr": 0.02985751567338641,
|
84 |
-
"acc_norm": 0.22727272727272727,
|
85 |
-
"acc_norm_stderr": 0.02985751567338641
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2482758620689655,
|
89 |
-
"acc_stderr": 0.036001056927277716,
|
90 |
-
"acc_norm": 0.2482758620689655,
|
91 |
-
"acc_norm_stderr": 0.036001056927277716
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.1568627450980392,
|
95 |
-
"acc_stderr": 0.03618664819936246,
|
96 |
-
"acc_norm": 0.1568627450980392,
|
97 |
-
"acc_norm_stderr": 0.03618664819936246
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.24789915966386555,
|
101 |
-
"acc_stderr": 0.028047967224176892,
|
102 |
-
"acc_norm": 0.24789915966386555,
|
103 |
-
"acc_norm_stderr": 0.028047967224176892
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.23076923076923078,
|
107 |
-
"acc_stderr": 0.021362027725222728,
|
108 |
-
"acc_norm": 0.23076923076923078,
|
109 |
-
"acc_norm_stderr": 0.021362027725222728
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.32,
|
113 |
-
"acc_stderr": 0.04688261722621504,
|
114 |
-
"acc_norm": 0.32,
|
115 |
-
"acc_norm_stderr": 0.04688261722621504
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.19,
|
119 |
-
"acc_stderr": 0.03942772444036623,
|
120 |
-
"acc_norm": 0.19,
|
121 |
-
"acc_norm_stderr": 0.03942772444036623
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.25925925925925924,
|
125 |
-
"acc_stderr": 0.042365112580946336,
|
126 |
-
"acc_norm": 0.25925925925925924,
|
127 |
-
"acc_norm_stderr": 0.042365112580946336
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.18719211822660098,
|
131 |
-
"acc_stderr": 0.027444924966882618,
|
132 |
-
"acc_norm": 0.18719211822660098,
|
133 |
-
"acc_norm_stderr": 0.027444924966882618
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.2645161290322581,
|
137 |
-
"acc_stderr": 0.02509189237885928,
|
138 |
-
"acc_norm": 0.2645161290322581,
|
139 |
-
"acc_norm_stderr": 0.02509189237885928
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.3034188034188034,
|
143 |
-
"acc_stderr": 0.030118210106942652,
|
144 |
-
"acc_norm": 0.3034188034188034,
|
145 |
-
"acc_norm_stderr": 0.030118210106942652
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2037735849056604,
|
149 |
-
"acc_stderr": 0.02479078450177541,
|
150 |
-
"acc_norm": 0.2037735849056604,
|
151 |
-
"acc_norm_stderr": 0.02479078450177541
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.22727272727272727,
|
155 |
-
"acc_stderr": 0.04013964554072775,
|
156 |
-
"acc_norm": 0.22727272727272727,
|
157 |
-
"acc_norm_stderr": 0.04013964554072775
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.24074074074074073,
|
161 |
-
"acc_stderr": 0.026067159222275794,
|
162 |
-
"acc_norm": 0.24074074074074073,
|
163 |
-
"acc_norm_stderr": 0.026067159222275794
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.24503311258278146,
|
167 |
-
"acc_stderr": 0.035118075718047245,
|
168 |
-
"acc_norm": 0.24503311258278146,
|
169 |
-
"acc_norm_stderr": 0.035118075718047245
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.24875621890547264,
|
173 |
-
"acc_stderr": 0.030567675938916707,
|
174 |
-
"acc_norm": 0.24875621890547264,
|
175 |
-
"acc_norm_stderr": 0.030567675938916707
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.18497109826589594,
|
179 |
-
"acc_stderr": 0.029605623981771204,
|
180 |
-
"acc_norm": 0.18497109826589594,
|
181 |
-
"acc_norm_stderr": 0.029605623981771204
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.25132275132275134,
|
185 |
-
"acc_stderr": 0.022340482339643898,
|
186 |
-
"acc_norm": 0.25132275132275134,
|
187 |
-
"acc_norm_stderr": 0.022340482339643898
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.22916666666666666,
|
191 |
-
"acc_stderr": 0.035146974678623884,
|
192 |
-
"acc_norm": 0.22916666666666666,
|
193 |
-
"acc_norm_stderr": 0.035146974678623884
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.04292346959909284,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.04292346959909284
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.28,
|
203 |
-
"acc_stderr": 0.04512608598542129,
|
204 |
-
"acc_norm": 0.28,
|
205 |
-
"acc_norm_stderr": 0.04512608598542129
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.23410404624277456,
|
209 |
-
"acc_stderr": 0.022797110278071128,
|
210 |
-
"acc_norm": 0.23410404624277456,
|
211 |
-
"acc_norm_stderr": 0.022797110278071128
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.26380368098159507,
|
215 |
-
"acc_stderr": 0.03462419931615624,
|
216 |
-
"acc_norm": 0.26380368098159507,
|
217 |
-
"acc_norm_stderr": 0.03462419931615624
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.28703703703703703,
|
221 |
-
"acc_stderr": 0.025171041915309684,
|
222 |
-
"acc_norm": 0.28703703703703703,
|
223 |
-
"acc_norm_stderr": 0.025171041915309684
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.3,
|
227 |
-
"acc_stderr": 0.046056618647183814,
|
228 |
-
"acc_norm": 0.3,
|
229 |
-
"acc_norm_stderr": 0.046056618647183814
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.20725388601036268,
|
233 |
-
"acc_stderr": 0.029252823291803644,
|
234 |
-
"acc_norm": 0.20725388601036268,
|
235 |
-
"acc_norm_stderr": 0.029252823291803644
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2631578947368421,
|
239 |
-
"acc_stderr": 0.04142439719489362,
|
240 |
-
"acc_norm": 0.2631578947368421,
|
241 |
-
"acc_norm_stderr": 0.04142439719489362
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.22201834862385322,
|
245 |
-
"acc_stderr": 0.017818849564796624,
|
246 |
-
"acc_norm": 0.22201834862385322,
|
247 |
-
"acc_norm_stderr": 0.017818849564796624
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.31746031746031744,
|
251 |
-
"acc_stderr": 0.04163453031302859,
|
252 |
-
"acc_norm": 0.31746031746031744,
|
253 |
-
"acc_norm_stderr": 0.04163453031302859
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.24836601307189543,
|
257 |
-
"acc_stderr": 0.024739981355113592,
|
258 |
-
"acc_norm": 0.24836601307189543,
|
259 |
-
"acc_norm_stderr": 0.024739981355113592
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.22,
|
263 |
-
"acc_stderr": 0.04163331998932269,
|
264 |
-
"acc_norm": 0.22,
|
265 |
-
"acc_norm_stderr": 0.04163331998932269
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.32231404958677684,
|
269 |
-
"acc_stderr": 0.042664163633521685,
|
270 |
-
"acc_norm": 0.32231404958677684,
|
271 |
-
"acc_norm_stderr": 0.042664163633521685
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.20394736842105263,
|
275 |
-
"acc_stderr": 0.0327900040631005,
|
276 |
-
"acc_norm": 0.20394736842105263,
|
277 |
-
"acc_norm_stderr": 0.0327900040631005
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.2581699346405229,
|
281 |
-
"acc_stderr": 0.017704531653250075,
|
282 |
-
"acc_norm": 0.2581699346405229,
|
283 |
-
"acc_norm_stderr": 0.017704531653250075
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2375886524822695,
|
287 |
-
"acc_stderr": 0.025389512552729903,
|
288 |
-
"acc_norm": 0.2375886524822695,
|
289 |
-
"acc_norm_stderr": 0.025389512552729903
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25,
|
293 |
-
"acc_stderr": 0.04109974682633932,
|
294 |
-
"acc_norm": 0.25,
|
295 |
-
"acc_norm_stderr": 0.04109974682633932
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3333333333333333,
|
299 |
-
"acc_stderr": 0.03214952147802747,
|
300 |
-
"acc_norm": 0.3333333333333333,
|
301 |
-
"acc_norm_stderr": 0.03214952147802747
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.27262569832402234,
|
305 |
-
"acc_stderr": 0.014893391735249608,
|
306 |
-
"acc_norm": 0.27262569832402234,
|
307 |
-
"acc_norm_stderr": 0.014893391735249608
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.28,
|
311 |
-
"acc_stderr": 0.04512608598542127,
|
312 |
-
"acc_norm": 0.28,
|
313 |
-
"acc_norm_stderr": 0.04512608598542127
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.19,
|
317 |
-
"acc_stderr": 0.039427724440366234,
|
318 |
-
"acc_norm": 0.19,
|
319 |
-
"acc_norm_stderr": 0.039427724440366234
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.25,
|
323 |
-
"acc_stderr": 0.026303648393696036,
|
324 |
-
"acc_norm": 0.25,
|
325 |
-
"acc_norm_stderr": 0.026303648393696036
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.21224489795918366,
|
329 |
-
"acc_stderr": 0.026176967197866764,
|
330 |
-
"acc_norm": 0.21224489795918366,
|
331 |
-
"acc_norm_stderr": 0.026176967197866764
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.28270042194092826,
|
335 |
-
"acc_stderr": 0.029312814153955914,
|
336 |
-
"acc_norm": 0.28270042194092826,
|
337 |
-
"acc_norm_stderr": 0.029312814153955914
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2503259452411995,
|
341 |
-
"acc_stderr": 0.011064151027165438,
|
342 |
-
"acc_norm": 0.2503259452411995,
|
343 |
-
"acc_norm_stderr": 0.011064151027165438
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.25,
|
347 |
-
"acc_stderr": 0.03039153369274154,
|
348 |
-
"acc_norm": 0.25,
|
349 |
-
"acc_norm_stderr": 0.03039153369274154
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.23030303030303031,
|
353 |
-
"acc_stderr": 0.03287666758603488,
|
354 |
-
"acc_norm": 0.23030303030303031,
|
355 |
-
"acc_norm_stderr": 0.03287666758603488
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.28518971848225216,
|
359 |
-
"mc1_stderr": 0.015805827874454895,
|
360 |
-
"mc2": 0.43765472485909873,
|
361 |
-
"mc2_stderr": 0.015405588178148114
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.3051643192488263,
|
365 |
-
"acc_stderr": 0.01578494789073776,
|
366 |
-
"acc_norm": 0.3732394366197183,
|
367 |
-
"acc_norm_stderr": 0.01657982009187977
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "42dot/42dot_LLM-SFT-1.3B",
|
436 |
-
"model_sha": "2dadd4492f0b27c302d8a5518003fa6045e32a8a",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.19112627986348124,
|
5 |
-
"acc_stderr": 0.011490055292778596,
|
6 |
-
"acc_norm": 0.24829351535836178,
|
7 |
-
"acc_norm_stderr": 0.012624912868089764
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.2756423023302131,
|
11 |
-
"acc_stderr": 0.0044592414745187915,
|
12 |
-
"acc_norm": 0.29884485162318264,
|
13 |
-
"acc_norm_stderr": 0.004568161710399566
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.27485380116959063,
|
17 |
-
"acc_stderr": 0.03424042924691582,
|
18 |
-
"acc_norm": 0.27485380116959063,
|
19 |
-
"acc_norm_stderr": 0.03424042924691582
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.17475728155339806,
|
23 |
-
"acc_stderr": 0.037601780060266196,
|
24 |
-
"acc_norm": 0.17475728155339806,
|
25 |
-
"acc_norm_stderr": 0.037601780060266196
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.2669220945083014,
|
29 |
-
"acc_stderr": 0.015818450894777555,
|
30 |
-
"acc_norm": 0.2669220945083014,
|
31 |
-
"acc_norm_stderr": 0.015818450894777555
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.3111111111111111,
|
35 |
-
"acc_stderr": 0.03999262876617722,
|
36 |
-
"acc_norm": 0.3111111111111111,
|
37 |
-
"acc_norm_stderr": 0.03999262876617722
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.23,
|
41 |
-
"acc_stderr": 0.04229525846816506,
|
42 |
-
"acc_norm": 0.23,
|
43 |
-
"acc_norm_stderr": 0.04229525846816506
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2936170212765957,
|
47 |
-
"acc_stderr": 0.02977164271249123,
|
48 |
-
"acc_norm": 0.2936170212765957,
|
49 |
-
"acc_norm_stderr": 0.02977164271249123
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.27710843373493976,
|
53 |
-
"acc_stderr": 0.03484331592680588,
|
54 |
-
"acc_norm": 0.27710843373493976,
|
55 |
-
"acc_norm_stderr": 0.03484331592680588
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.20257234726688103,
|
59 |
-
"acc_stderr": 0.022827317491059686,
|
60 |
-
"acc_norm": 0.20257234726688103,
|
61 |
-
"acc_norm_stderr": 0.022827317491059686
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3632286995515695,
|
65 |
-
"acc_stderr": 0.032277904428505,
|
66 |
-
"acc_norm": 0.3632286995515695,
|
67 |
-
"acc_norm_stderr": 0.032277904428505
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2824427480916031,
|
71 |
-
"acc_stderr": 0.03948406125768361,
|
72 |
-
"acc_norm": 0.2824427480916031,
|
73 |
-
"acc_norm_stderr": 0.03948406125768361
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.3,
|
77 |
-
"acc_stderr": 0.046056618647183814,
|
78 |
-
"acc_norm": 0.3,
|
79 |
-
"acc_norm_stderr": 0.046056618647183814
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.21717171717171718,
|
83 |
-
"acc_stderr": 0.029376616484945644,
|
84 |
-
"acc_norm": 0.21717171717171718,
|
85 |
-
"acc_norm_stderr": 0.029376616484945644
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2206896551724138,
|
89 |
-
"acc_stderr": 0.03455930201924812,
|
90 |
-
"acc_norm": 0.2206896551724138,
|
91 |
-
"acc_norm_stderr": 0.03455930201924812
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.14705882352941177,
|
95 |
-
"acc_stderr": 0.035240689515674474,
|
96 |
-
"acc_norm": 0.14705882352941177,
|
97 |
-
"acc_norm_stderr": 0.035240689515674474
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.226890756302521,
|
101 |
-
"acc_stderr": 0.027205371538279476,
|
102 |
-
"acc_norm": 0.226890756302521,
|
103 |
-
"acc_norm_stderr": 0.027205371538279476
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.2153846153846154,
|
107 |
-
"acc_stderr": 0.020843034557462878,
|
108 |
-
"acc_norm": 0.2153846153846154,
|
109 |
-
"acc_norm_stderr": 0.020843034557462878
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.21,
|
113 |
-
"acc_stderr": 0.040936018074033256,
|
114 |
-
"acc_norm": 0.21,
|
115 |
-
"acc_norm_stderr": 0.040936018074033256
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.26,
|
119 |
-
"acc_stderr": 0.0440844002276808,
|
120 |
-
"acc_norm": 0.26,
|
121 |
-
"acc_norm_stderr": 0.0440844002276808
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.25,
|
125 |
-
"acc_stderr": 0.04186091791394607,
|
126 |
-
"acc_norm": 0.25,
|
127 |
-
"acc_norm_stderr": 0.04186091791394607
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.13793103448275862,
|
131 |
-
"acc_stderr": 0.024261984301044565,
|
132 |
-
"acc_norm": 0.13793103448275862,
|
133 |
-
"acc_norm_stderr": 0.024261984301044565
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.20967741935483872,
|
137 |
-
"acc_stderr": 0.023157879349083536,
|
138 |
-
"acc_norm": 0.20967741935483872,
|
139 |
-
"acc_norm_stderr": 0.023157879349083536
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.2094017094017094,
|
143 |
-
"acc_stderr": 0.026655699653922737,
|
144 |
-
"acc_norm": 0.2094017094017094,
|
145 |
-
"acc_norm_stderr": 0.026655699653922737
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2490566037735849,
|
149 |
-
"acc_stderr": 0.026616482980501715,
|
150 |
-
"acc_norm": 0.2490566037735849,
|
151 |
-
"acc_norm_stderr": 0.026616482980501715
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.21818181818181817,
|
155 |
-
"acc_stderr": 0.03955932861795833,
|
156 |
-
"acc_norm": 0.21818181818181817,
|
157 |
-
"acc_norm_stderr": 0.03955932861795833
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.26296296296296295,
|
161 |
-
"acc_stderr": 0.02684205787383371,
|
162 |
-
"acc_norm": 0.26296296296296295,
|
163 |
-
"acc_norm_stderr": 0.02684205787383371
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2847682119205298,
|
167 |
-
"acc_stderr": 0.03684881521389024,
|
168 |
-
"acc_norm": 0.2847682119205298,
|
169 |
-
"acc_norm_stderr": 0.03684881521389024
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.24378109452736318,
|
173 |
-
"acc_stderr": 0.03036049015401464,
|
174 |
-
"acc_norm": 0.24378109452736318,
|
175 |
-
"acc_norm_stderr": 0.03036049015401464
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.2138728323699422,
|
179 |
-
"acc_stderr": 0.03126511206173043,
|
180 |
-
"acc_norm": 0.2138728323699422,
|
181 |
-
"acc_norm_stderr": 0.03126511206173043
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.25396825396825395,
|
185 |
-
"acc_stderr": 0.022418042891113946,
|
186 |
-
"acc_norm": 0.25396825396825395,
|
187 |
-
"acc_norm_stderr": 0.022418042891113946
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2222222222222222,
|
191 |
-
"acc_stderr": 0.03476590104304134,
|
192 |
-
"acc_norm": 0.2222222222222222,
|
193 |
-
"acc_norm_stderr": 0.03476590104304134
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.04292346959909283,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.04292346959909283
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.26,
|
203 |
-
"acc_stderr": 0.0440844002276808,
|
204 |
-
"acc_norm": 0.26,
|
205 |
-
"acc_norm_stderr": 0.0440844002276808
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.26011560693641617,
|
209 |
-
"acc_stderr": 0.023618678310069363,
|
210 |
-
"acc_norm": 0.26011560693641617,
|
211 |
-
"acc_norm_stderr": 0.023618678310069363
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.27607361963190186,
|
215 |
-
"acc_stderr": 0.0351238528370505,
|
216 |
-
"acc_norm": 0.27607361963190186,
|
217 |
-
"acc_norm_stderr": 0.0351238528370505
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.24074074074074073,
|
221 |
-
"acc_stderr": 0.02378858355165854,
|
222 |
-
"acc_norm": 0.24074074074074073,
|
223 |
-
"acc_norm_stderr": 0.02378858355165854
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.29,
|
227 |
-
"acc_stderr": 0.045604802157206845,
|
228 |
-
"acc_norm": 0.29,
|
229 |
-
"acc_norm_stderr": 0.045604802157206845
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.20725388601036268,
|
233 |
-
"acc_stderr": 0.02925282329180363,
|
234 |
-
"acc_norm": 0.20725388601036268,
|
235 |
-
"acc_norm_stderr": 0.02925282329180363
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2807017543859649,
|
239 |
-
"acc_stderr": 0.042270544512322004,
|
240 |
-
"acc_norm": 0.2807017543859649,
|
241 |
-
"acc_norm_stderr": 0.042270544512322004
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.21100917431192662,
|
245 |
-
"acc_stderr": 0.017493922404112648,
|
246 |
-
"acc_norm": 0.21100917431192662,
|
247 |
-
"acc_norm_stderr": 0.017493922404112648
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.23809523809523808,
|
251 |
-
"acc_stderr": 0.038095238095238106,
|
252 |
-
"acc_norm": 0.23809523809523808,
|
253 |
-
"acc_norm_stderr": 0.038095238095238106
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.23529411764705882,
|
257 |
-
"acc_stderr": 0.024288619466046116,
|
258 |
-
"acc_norm": 0.23529411764705882,
|
259 |
-
"acc_norm_stderr": 0.024288619466046116
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.3,
|
263 |
-
"acc_stderr": 0.046056618647183814,
|
264 |
-
"acc_norm": 0.3,
|
265 |
-
"acc_norm_stderr": 0.046056618647183814
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.19008264462809918,
|
269 |
-
"acc_stderr": 0.03581796951709282,
|
270 |
-
"acc_norm": 0.19008264462809918,
|
271 |
-
"acc_norm_stderr": 0.03581796951709282
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.17763157894736842,
|
275 |
-
"acc_stderr": 0.03110318238312338,
|
276 |
-
"acc_norm": 0.17763157894736842,
|
277 |
-
"acc_norm_stderr": 0.03110318238312338
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.25980392156862747,
|
281 |
-
"acc_stderr": 0.017740899509177795,
|
282 |
-
"acc_norm": 0.25980392156862747,
|
283 |
-
"acc_norm_stderr": 0.017740899509177795
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2765957446808511,
|
287 |
-
"acc_stderr": 0.026684564340460997,
|
288 |
-
"acc_norm": 0.2765957446808511,
|
289 |
-
"acc_norm_stderr": 0.026684564340460997
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.2857142857142857,
|
293 |
-
"acc_stderr": 0.042878587513404544,
|
294 |
-
"acc_norm": 0.2857142857142857,
|
295 |
-
"acc_norm_stderr": 0.042878587513404544
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.35648148148148145,
|
299 |
-
"acc_stderr": 0.032664783315272714,
|
300 |
-
"acc_norm": 0.35648148148148145,
|
301 |
-
"acc_norm_stderr": 0.032664783315272714
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.23016759776536314,
|
305 |
-
"acc_stderr": 0.014078339253425809,
|
306 |
-
"acc_norm": 0.23016759776536314,
|
307 |
-
"acc_norm_stderr": 0.014078339253425809
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.31,
|
311 |
-
"acc_stderr": 0.04648231987117316,
|
312 |
-
"acc_norm": 0.31,
|
313 |
-
"acc_norm_stderr": 0.04648231987117316
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.28,
|
317 |
-
"acc_stderr": 0.04512608598542128,
|
318 |
-
"acc_norm": 0.28,
|
319 |
-
"acc_norm_stderr": 0.04512608598542128
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.29411764705882354,
|
323 |
-
"acc_stderr": 0.02767846864214471,
|
324 |
-
"acc_norm": 0.29411764705882354,
|
325 |
-
"acc_norm_stderr": 0.02767846864214471
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.3142857142857143,
|
329 |
-
"acc_stderr": 0.029719329422417468,
|
330 |
-
"acc_norm": 0.3142857142857143,
|
331 |
-
"acc_norm_stderr": 0.029719329422417468
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.2742616033755274,
|
335 |
-
"acc_stderr": 0.02904133351059804,
|
336 |
-
"acc_norm": 0.2742616033755274,
|
337 |
-
"acc_norm_stderr": 0.02904133351059804
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.24511082138200782,
|
341 |
-
"acc_stderr": 0.010986307870045526,
|
342 |
-
"acc_norm": 0.24511082138200782,
|
343 |
-
"acc_norm_stderr": 0.010986307870045526
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.27941176470588236,
|
347 |
-
"acc_stderr": 0.03149328104507955,
|
348 |
-
"acc_norm": 0.27941176470588236,
|
349 |
-
"acc_norm_stderr": 0.03149328104507955
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.21818181818181817,
|
353 |
-
"acc_stderr": 0.03225078108306289,
|
354 |
-
"acc_norm": 0.21818181818181817,
|
355 |
-
"acc_norm_stderr": 0.03225078108306289
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.29498164014687883,
|
359 |
-
"mc1_stderr": 0.015964400965589674,
|
360 |
-
"mc2": 0.49219803033147647,
|
361 |
-
"mc2_stderr": 0.015947492879186672
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.0539906103286385,
|
365 |
-
"acc_stderr": 0.007747151732014082,
|
366 |
-
"acc_norm": 0.09859154929577464,
|
367 |
-
"acc_norm_stderr": 0.010219175985280616
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "AtAndDev/ShortKingv0.1",
|
436 |
-
"model_sha": "6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.22696245733788395,
|
5 |
-
"acc_stderr": 0.012240491536132861,
|
6 |
-
"acc_norm": 0.2773037542662116,
|
7 |
-
"acc_norm_stderr": 0.013082095839059374
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.33578968333001397,
|
11 |
-
"acc_stderr": 0.004713006072807706,
|
12 |
-
"acc_norm": 0.41585341565425216,
|
13 |
-
"acc_norm_stderr": 0.0049186120989440285
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.29239766081871343,
|
17 |
-
"acc_stderr": 0.034886477134579236,
|
18 |
-
"acc_norm": 0.29239766081871343,
|
19 |
-
"acc_norm_stderr": 0.034886477134579236
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.2524271844660194,
|
23 |
-
"acc_stderr": 0.04301250399690877,
|
24 |
-
"acc_norm": 0.2524271844660194,
|
25 |
-
"acc_norm_stderr": 0.04301250399690877
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.26309067688378035,
|
29 |
-
"acc_stderr": 0.015745497169049057,
|
30 |
-
"acc_norm": 0.26309067688378035,
|
31 |
-
"acc_norm_stderr": 0.015745497169049057
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.24444444444444444,
|
35 |
-
"acc_stderr": 0.037125378336148665,
|
36 |
-
"acc_norm": 0.24444444444444444,
|
37 |
-
"acc_norm_stderr": 0.037125378336148665
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.33,
|
41 |
-
"acc_stderr": 0.047258156262526045,
|
42 |
-
"acc_norm": 0.33,
|
43 |
-
"acc_norm_stderr": 0.047258156262526045
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.251063829787234,
|
47 |
-
"acc_stderr": 0.028346963777162452,
|
48 |
-
"acc_norm": 0.251063829787234,
|
49 |
-
"acc_norm_stderr": 0.028346963777162452
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.20481927710843373,
|
53 |
-
"acc_stderr": 0.03141784291663926,
|
54 |
-
"acc_norm": 0.20481927710843373,
|
55 |
-
"acc_norm_stderr": 0.03141784291663926
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2958199356913183,
|
59 |
-
"acc_stderr": 0.025922371788818784,
|
60 |
-
"acc_norm": 0.2958199356913183,
|
61 |
-
"acc_norm_stderr": 0.025922371788818784
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.34977578475336324,
|
65 |
-
"acc_stderr": 0.03200736719484503,
|
66 |
-
"acc_norm": 0.34977578475336324,
|
67 |
-
"acc_norm_stderr": 0.03200736719484503
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.25190839694656486,
|
71 |
-
"acc_stderr": 0.03807387116306085,
|
72 |
-
"acc_norm": 0.25190839694656486,
|
73 |
-
"acc_norm_stderr": 0.03807387116306085
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.27,
|
77 |
-
"acc_stderr": 0.044619604333847394,
|
78 |
-
"acc_norm": 0.27,
|
79 |
-
"acc_norm_stderr": 0.044619604333847394
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.2676767676767677,
|
83 |
-
"acc_stderr": 0.031544498882702866,
|
84 |
-
"acc_norm": 0.2676767676767677,
|
85 |
-
"acc_norm_stderr": 0.031544498882702866
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.22758620689655173,
|
89 |
-
"acc_stderr": 0.03493950380131184,
|
90 |
-
"acc_norm": 0.22758620689655173,
|
91 |
-
"acc_norm_stderr": 0.03493950380131184
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.18627450980392157,
|
95 |
-
"acc_stderr": 0.03873958714149351,
|
96 |
-
"acc_norm": 0.18627450980392157,
|
97 |
-
"acc_norm_stderr": 0.03873958714149351
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.24789915966386555,
|
101 |
-
"acc_stderr": 0.028047967224176892,
|
102 |
-
"acc_norm": 0.24789915966386555,
|
103 |
-
"acc_norm_stderr": 0.028047967224176892
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.21794871794871795,
|
107 |
-
"acc_stderr": 0.020932445774463185,
|
108 |
-
"acc_norm": 0.21794871794871795,
|
109 |
-
"acc_norm_stderr": 0.020932445774463185
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.2,
|
113 |
-
"acc_stderr": 0.04020151261036846,
|
114 |
-
"acc_norm": 0.2,
|
115 |
-
"acc_norm_stderr": 0.04020151261036846
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.3,
|
119 |
-
"acc_stderr": 0.046056618647183814,
|
120 |
-
"acc_norm": 0.3,
|
121 |
-
"acc_norm_stderr": 0.046056618647183814
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.26851851851851855,
|
125 |
-
"acc_stderr": 0.04284467968052191,
|
126 |
-
"acc_norm": 0.26851851851851855,
|
127 |
-
"acc_norm_stderr": 0.04284467968052191
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.27586206896551724,
|
131 |
-
"acc_stderr": 0.031447125816782426,
|
132 |
-
"acc_norm": 0.27586206896551724,
|
133 |
-
"acc_norm_stderr": 0.031447125816782426
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.27419354838709675,
|
137 |
-
"acc_stderr": 0.025378139970885193,
|
138 |
-
"acc_norm": 0.27419354838709675,
|
139 |
-
"acc_norm_stderr": 0.025378139970885193
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.24358974358974358,
|
143 |
-
"acc_stderr": 0.02812096650391441,
|
144 |
-
"acc_norm": 0.24358974358974358,
|
145 |
-
"acc_norm_stderr": 0.02812096650391441
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2528301886792453,
|
149 |
-
"acc_stderr": 0.026749899771241238,
|
150 |
-
"acc_norm": 0.2528301886792453,
|
151 |
-
"acc_norm_stderr": 0.026749899771241238
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.2545454545454545,
|
155 |
-
"acc_stderr": 0.04172343038705383,
|
156 |
-
"acc_norm": 0.2545454545454545,
|
157 |
-
"acc_norm_stderr": 0.04172343038705383
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.3037037037037037,
|
161 |
-
"acc_stderr": 0.028037929969114993,
|
162 |
-
"acc_norm": 0.3037037037037037,
|
163 |
-
"acc_norm_stderr": 0.028037929969114993
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2781456953642384,
|
167 |
-
"acc_stderr": 0.03658603262763743,
|
168 |
-
"acc_norm": 0.2781456953642384,
|
169 |
-
"acc_norm_stderr": 0.03658603262763743
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.2537313432835821,
|
173 |
-
"acc_stderr": 0.03076944496729602,
|
174 |
-
"acc_norm": 0.2537313432835821,
|
175 |
-
"acc_norm_stderr": 0.03076944496729602
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.19653179190751446,
|
179 |
-
"acc_stderr": 0.03029957466478815,
|
180 |
-
"acc_norm": 0.19653179190751446,
|
181 |
-
"acc_norm_stderr": 0.03029957466478815
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2962962962962963,
|
185 |
-
"acc_stderr": 0.023517294335963286,
|
186 |
-
"acc_norm": 0.2962962962962963,
|
187 |
-
"acc_norm_stderr": 0.023517294335963286
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2777777777777778,
|
191 |
-
"acc_stderr": 0.03745554791462457,
|
192 |
-
"acc_norm": 0.2777777777777778,
|
193 |
-
"acc_norm_stderr": 0.03745554791462457
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.21,
|
197 |
-
"acc_stderr": 0.040936018074033256,
|
198 |
-
"acc_norm": 0.21,
|
199 |
-
"acc_norm_stderr": 0.040936018074033256
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.23,
|
203 |
-
"acc_stderr": 0.04229525846816505,
|
204 |
-
"acc_norm": 0.23,
|
205 |
-
"acc_norm_stderr": 0.04229525846816505
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.24855491329479767,
|
209 |
-
"acc_stderr": 0.023267528432100174,
|
210 |
-
"acc_norm": 0.24855491329479767,
|
211 |
-
"acc_norm_stderr": 0.023267528432100174
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3374233128834356,
|
215 |
-
"acc_stderr": 0.03714908409935574,
|
216 |
-
"acc_norm": 0.3374233128834356,
|
217 |
-
"acc_norm_stderr": 0.03714908409935574
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.26851851851851855,
|
221 |
-
"acc_stderr": 0.02465968518596728,
|
222 |
-
"acc_norm": 0.26851851851851855,
|
223 |
-
"acc_norm_stderr": 0.02465968518596728
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.29,
|
227 |
-
"acc_stderr": 0.04560480215720684,
|
228 |
-
"acc_norm": 0.29,
|
229 |
-
"acc_norm_stderr": 0.04560480215720684
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.19689119170984457,
|
233 |
-
"acc_stderr": 0.028697873971860688,
|
234 |
-
"acc_norm": 0.19689119170984457,
|
235 |
-
"acc_norm_stderr": 0.028697873971860688
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.15789473684210525,
|
239 |
-
"acc_stderr": 0.03430265978485698,
|
240 |
-
"acc_norm": 0.15789473684210525,
|
241 |
-
"acc_norm_stderr": 0.03430265978485698
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.24036697247706423,
|
245 |
-
"acc_stderr": 0.01832060732096407,
|
246 |
-
"acc_norm": 0.24036697247706423,
|
247 |
-
"acc_norm_stderr": 0.01832060732096407
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.25396825396825395,
|
251 |
-
"acc_stderr": 0.03893259610604673,
|
252 |
-
"acc_norm": 0.25396825396825395,
|
253 |
-
"acc_norm_stderr": 0.03893259610604673
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.23202614379084968,
|
257 |
-
"acc_stderr": 0.02417084087934102,
|
258 |
-
"acc_norm": 0.23202614379084968,
|
259 |
-
"acc_norm_stderr": 0.02417084087934102
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.16,
|
263 |
-
"acc_stderr": 0.0368452949177471,
|
264 |
-
"acc_norm": 0.16,
|
265 |
-
"acc_norm_stderr": 0.0368452949177471
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.23140495867768596,
|
269 |
-
"acc_stderr": 0.03849856098794088,
|
270 |
-
"acc_norm": 0.23140495867768596,
|
271 |
-
"acc_norm_stderr": 0.03849856098794088
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.21710526315789475,
|
275 |
-
"acc_stderr": 0.03355045304882924,
|
276 |
-
"acc_norm": 0.21710526315789475,
|
277 |
-
"acc_norm_stderr": 0.03355045304882924
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.25163398692810457,
|
281 |
-
"acc_stderr": 0.01755581809132226,
|
282 |
-
"acc_norm": 0.25163398692810457,
|
283 |
-
"acc_norm_stderr": 0.01755581809132226
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2624113475177305,
|
287 |
-
"acc_stderr": 0.026244920349843014,
|
288 |
-
"acc_norm": 0.2624113475177305,
|
289 |
-
"acc_norm_stderr": 0.026244920349843014
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.23214285714285715,
|
293 |
-
"acc_stderr": 0.04007341809755807,
|
294 |
-
"acc_norm": 0.23214285714285715,
|
295 |
-
"acc_norm_stderr": 0.04007341809755807
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4583333333333333,
|
299 |
-
"acc_stderr": 0.033981108902946366,
|
300 |
-
"acc_norm": 0.4583333333333333,
|
301 |
-
"acc_norm_stderr": 0.033981108902946366
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.27262569832402234,
|
305 |
-
"acc_stderr": 0.014893391735249608,
|
306 |
-
"acc_norm": 0.27262569832402234,
|
307 |
-
"acc_norm_stderr": 0.014893391735249608
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.14,
|
311 |
-
"acc_stderr": 0.03487350880197771,
|
312 |
-
"acc_norm": 0.14,
|
313 |
-
"acc_norm_stderr": 0.03487350880197771
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.29,
|
317 |
-
"acc_stderr": 0.045604802157206845,
|
318 |
-
"acc_norm": 0.29,
|
319 |
-
"acc_norm_stderr": 0.045604802157206845
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.3161764705882353,
|
323 |
-
"acc_stderr": 0.02824568739146292,
|
324 |
-
"acc_norm": 0.3161764705882353,
|
325 |
-
"acc_norm_stderr": 0.02824568739146292
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.2653061224489796,
|
329 |
-
"acc_stderr": 0.028263889943784586,
|
330 |
-
"acc_norm": 0.2653061224489796,
|
331 |
-
"acc_norm_stderr": 0.028263889943784586
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.26582278481012656,
|
335 |
-
"acc_stderr": 0.028756799629658335,
|
336 |
-
"acc_norm": 0.26582278481012656,
|
337 |
-
"acc_norm_stderr": 0.028756799629658335
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.23728813559322035,
|
341 |
-
"acc_stderr": 0.01086543669078027,
|
342 |
-
"acc_norm": 0.23728813559322035,
|
343 |
-
"acc_norm_stderr": 0.01086543669078027
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.22549019607843138,
|
347 |
-
"acc_stderr": 0.02933116229425173,
|
348 |
-
"acc_norm": 0.22549019607843138,
|
349 |
-
"acc_norm_stderr": 0.02933116229425173
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.21212121212121213,
|
353 |
-
"acc_stderr": 0.031922715695483,
|
354 |
-
"acc_norm": 0.21212121212121213,
|
355 |
-
"acc_norm_stderr": 0.031922715695483
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24479804161566707,
|
359 |
-
"mc1_stderr": 0.015051869486715008,
|
360 |
-
"mc2": 0.41338491158026774,
|
361 |
-
"mc2_stderr": 0.01512108388775634
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.23826291079812206,
|
365 |
-
"acc_stderr": 0.014603803898011337,
|
366 |
-
"acc_norm": 0.3380281690140845,
|
367 |
-
"acc_norm_stderr": 0.016215540194273206
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "BM-K/polyglot-ko-1.3b-it-v1.0",
|
436 |
-
"model_sha": "2f5b0dfed443e3a89c13a13b48d6fe6838c86e67",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.2363481228668942,
|
5 |
-
"acc_stderr": 0.012414960524301818,
|
6 |
-
"acc_norm": 0.2841296928327645,
|
7 |
-
"acc_norm_stderr": 0.013179442447653887
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3379804819757021,
|
11 |
-
"acc_stderr": 0.004720551323547123,
|
12 |
-
"acc_norm": 0.4192391953794065,
|
13 |
-
"acc_norm_stderr": 0.004924261467934422
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.29239766081871343,
|
17 |
-
"acc_stderr": 0.03488647713457923,
|
18 |
-
"acc_norm": 0.29239766081871343,
|
19 |
-
"acc_norm_stderr": 0.03488647713457923
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.27184466019417475,
|
23 |
-
"acc_stderr": 0.044052680241409216,
|
24 |
-
"acc_norm": 0.27184466019417475,
|
25 |
-
"acc_norm_stderr": 0.044052680241409216
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.24521072796934865,
|
29 |
-
"acc_stderr": 0.01538435228454394,
|
30 |
-
"acc_norm": 0.24521072796934865,
|
31 |
-
"acc_norm_stderr": 0.01538435228454394
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.2518518518518518,
|
35 |
-
"acc_stderr": 0.037498507091740234,
|
36 |
-
"acc_norm": 0.2518518518518518,
|
37 |
-
"acc_norm_stderr": 0.037498507091740234
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.31,
|
41 |
-
"acc_stderr": 0.04648231987117316,
|
42 |
-
"acc_norm": 0.31,
|
43 |
-
"acc_norm_stderr": 0.04648231987117316
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2553191489361702,
|
47 |
-
"acc_stderr": 0.028504856470514192,
|
48 |
-
"acc_norm": 0.2553191489361702,
|
49 |
-
"acc_norm_stderr": 0.028504856470514192
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.2289156626506024,
|
53 |
-
"acc_stderr": 0.03270745277352477,
|
54 |
-
"acc_norm": 0.2289156626506024,
|
55 |
-
"acc_norm_stderr": 0.03270745277352477
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2990353697749196,
|
59 |
-
"acc_stderr": 0.026003301117885142,
|
60 |
-
"acc_norm": 0.2990353697749196,
|
61 |
-
"acc_norm_stderr": 0.026003301117885142
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3273542600896861,
|
65 |
-
"acc_stderr": 0.03149384670994131,
|
66 |
-
"acc_norm": 0.3273542600896861,
|
67 |
-
"acc_norm_stderr": 0.03149384670994131
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2366412213740458,
|
71 |
-
"acc_stderr": 0.037276735755969174,
|
72 |
-
"acc_norm": 0.2366412213740458,
|
73 |
-
"acc_norm_stderr": 0.037276735755969174
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.2,
|
77 |
-
"acc_stderr": 0.04020151261036846,
|
78 |
-
"acc_norm": 0.2,
|
79 |
-
"acc_norm_stderr": 0.04020151261036846
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.2828282828282828,
|
83 |
-
"acc_stderr": 0.032087795587867514,
|
84 |
-
"acc_norm": 0.2828282828282828,
|
85 |
-
"acc_norm_stderr": 0.032087795587867514
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2482758620689655,
|
89 |
-
"acc_stderr": 0.03600105692727771,
|
90 |
-
"acc_norm": 0.2482758620689655,
|
91 |
-
"acc_norm_stderr": 0.03600105692727771
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.21568627450980393,
|
95 |
-
"acc_stderr": 0.04092563958237655,
|
96 |
-
"acc_norm": 0.21568627450980393,
|
97 |
-
"acc_norm_stderr": 0.04092563958237655
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.23109243697478993,
|
101 |
-
"acc_stderr": 0.027381406927868973,
|
102 |
-
"acc_norm": 0.23109243697478993,
|
103 |
-
"acc_norm_stderr": 0.027381406927868973
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.2282051282051282,
|
107 |
-
"acc_stderr": 0.02127839386358628,
|
108 |
-
"acc_norm": 0.2282051282051282,
|
109 |
-
"acc_norm_stderr": 0.02127839386358628
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.23,
|
113 |
-
"acc_stderr": 0.04229525846816505,
|
114 |
-
"acc_norm": 0.23,
|
115 |
-
"acc_norm_stderr": 0.04229525846816505
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.29,
|
119 |
-
"acc_stderr": 0.045604802157206845,
|
120 |
-
"acc_norm": 0.29,
|
121 |
-
"acc_norm_stderr": 0.045604802157206845
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.26851851851851855,
|
125 |
-
"acc_stderr": 0.04284467968052191,
|
126 |
-
"acc_norm": 0.26851851851851855,
|
127 |
-
"acc_norm_stderr": 0.04284467968052191
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.2561576354679803,
|
131 |
-
"acc_stderr": 0.0307127300709826,
|
132 |
-
"acc_norm": 0.2561576354679803,
|
133 |
-
"acc_norm_stderr": 0.0307127300709826
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.2838709677419355,
|
137 |
-
"acc_stderr": 0.02564938106302926,
|
138 |
-
"acc_norm": 0.2838709677419355,
|
139 |
-
"acc_norm_stderr": 0.02564938106302926
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.25213675213675213,
|
143 |
-
"acc_stderr": 0.02844796547623101,
|
144 |
-
"acc_norm": 0.25213675213675213,
|
145 |
-
"acc_norm_stderr": 0.02844796547623101
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2528301886792453,
|
149 |
-
"acc_stderr": 0.02674989977124124,
|
150 |
-
"acc_norm": 0.2528301886792453,
|
151 |
-
"acc_norm_stderr": 0.02674989977124124
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.22727272727272727,
|
155 |
-
"acc_stderr": 0.04013964554072773,
|
156 |
-
"acc_norm": 0.22727272727272727,
|
157 |
-
"acc_norm_stderr": 0.04013964554072773
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.3111111111111111,
|
161 |
-
"acc_stderr": 0.028226446749683515,
|
162 |
-
"acc_norm": 0.3111111111111111,
|
163 |
-
"acc_norm_stderr": 0.028226446749683515
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2847682119205298,
|
167 |
-
"acc_stderr": 0.03684881521389023,
|
168 |
-
"acc_norm": 0.2847682119205298,
|
169 |
-
"acc_norm_stderr": 0.03684881521389023
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.23880597014925373,
|
173 |
-
"acc_stderr": 0.030147775935409217,
|
174 |
-
"acc_norm": 0.23880597014925373,
|
175 |
-
"acc_norm_stderr": 0.030147775935409217
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.1791907514450867,
|
179 |
-
"acc_stderr": 0.0292425130590633,
|
180 |
-
"acc_norm": 0.1791907514450867,
|
181 |
-
"acc_norm_stderr": 0.0292425130590633
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2830687830687831,
|
185 |
-
"acc_stderr": 0.023201392938194974,
|
186 |
-
"acc_norm": 0.2830687830687831,
|
187 |
-
"acc_norm_stderr": 0.023201392938194974
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2777777777777778,
|
191 |
-
"acc_stderr": 0.03745554791462457,
|
192 |
-
"acc_norm": 0.2777777777777778,
|
193 |
-
"acc_norm_stderr": 0.03745554791462457
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.18,
|
197 |
-
"acc_stderr": 0.038612291966536955,
|
198 |
-
"acc_norm": 0.18,
|
199 |
-
"acc_norm_stderr": 0.038612291966536955
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.22,
|
203 |
-
"acc_stderr": 0.041633319989322695,
|
204 |
-
"acc_norm": 0.22,
|
205 |
-
"acc_norm_stderr": 0.041633319989322695
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.2398843930635838,
|
209 |
-
"acc_stderr": 0.022989592543123567,
|
210 |
-
"acc_norm": 0.2398843930635838,
|
211 |
-
"acc_norm_stderr": 0.022989592543123567
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3312883435582822,
|
215 |
-
"acc_stderr": 0.03697983910025588,
|
216 |
-
"acc_norm": 0.3312883435582822,
|
217 |
-
"acc_norm_stderr": 0.03697983910025588
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.2993827160493827,
|
221 |
-
"acc_stderr": 0.02548311560119547,
|
222 |
-
"acc_norm": 0.2993827160493827,
|
223 |
-
"acc_norm_stderr": 0.02548311560119547
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.32,
|
227 |
-
"acc_stderr": 0.046882617226215034,
|
228 |
-
"acc_norm": 0.32,
|
229 |
-
"acc_norm_stderr": 0.046882617226215034
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.26424870466321243,
|
233 |
-
"acc_stderr": 0.03182155050916647,
|
234 |
-
"acc_norm": 0.26424870466321243,
|
235 |
-
"acc_norm_stderr": 0.03182155050916647
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.18421052631578946,
|
239 |
-
"acc_stderr": 0.03646758875075566,
|
240 |
-
"acc_norm": 0.18421052631578946,
|
241 |
-
"acc_norm_stderr": 0.03646758875075566
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.22201834862385322,
|
245 |
-
"acc_stderr": 0.017818849564796617,
|
246 |
-
"acc_norm": 0.22201834862385322,
|
247 |
-
"acc_norm_stderr": 0.017818849564796617
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.25396825396825395,
|
251 |
-
"acc_stderr": 0.03893259610604672,
|
252 |
-
"acc_norm": 0.25396825396825395,
|
253 |
-
"acc_norm_stderr": 0.03893259610604672
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.24836601307189543,
|
257 |
-
"acc_stderr": 0.02473998135511359,
|
258 |
-
"acc_norm": 0.24836601307189543,
|
259 |
-
"acc_norm_stderr": 0.02473998135511359
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.14,
|
263 |
-
"acc_stderr": 0.03487350880197771,
|
264 |
-
"acc_norm": 0.14,
|
265 |
-
"acc_norm_stderr": 0.03487350880197771
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.2644628099173554,
|
269 |
-
"acc_stderr": 0.040261875275912046,
|
270 |
-
"acc_norm": 0.2644628099173554,
|
271 |
-
"acc_norm_stderr": 0.040261875275912046
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.24342105263157895,
|
275 |
-
"acc_stderr": 0.034923496688842384,
|
276 |
-
"acc_norm": 0.24342105263157895,
|
277 |
-
"acc_norm_stderr": 0.034923496688842384
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.2647058823529412,
|
281 |
-
"acc_stderr": 0.017848089574913222,
|
282 |
-
"acc_norm": 0.2647058823529412,
|
283 |
-
"acc_norm_stderr": 0.017848089574913222
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2695035460992908,
|
287 |
-
"acc_stderr": 0.026469036818590634,
|
288 |
-
"acc_norm": 0.2695035460992908,
|
289 |
-
"acc_norm_stderr": 0.026469036818590634
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.21428571428571427,
|
293 |
-
"acc_stderr": 0.03894641120044792,
|
294 |
-
"acc_norm": 0.21428571428571427,
|
295 |
-
"acc_norm_stderr": 0.03894641120044792
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4351851851851852,
|
299 |
-
"acc_stderr": 0.03381200005643525,
|
300 |
-
"acc_norm": 0.4351851851851852,
|
301 |
-
"acc_norm_stderr": 0.03381200005643525
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.27262569832402234,
|
305 |
-
"acc_stderr": 0.014893391735249608,
|
306 |
-
"acc_norm": 0.27262569832402234,
|
307 |
-
"acc_norm_stderr": 0.014893391735249608
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.22,
|
311 |
-
"acc_stderr": 0.0416333199893227,
|
312 |
-
"acc_norm": 0.22,
|
313 |
-
"acc_norm_stderr": 0.0416333199893227
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.32,
|
317 |
-
"acc_stderr": 0.046882617226215034,
|
318 |
-
"acc_norm": 0.32,
|
319 |
-
"acc_norm_stderr": 0.046882617226215034
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.39338235294117646,
|
323 |
-
"acc_stderr": 0.029674288281311183,
|
324 |
-
"acc_norm": 0.39338235294117646,
|
325 |
-
"acc_norm_stderr": 0.029674288281311183
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.2938775510204082,
|
329 |
-
"acc_stderr": 0.02916273841024977,
|
330 |
-
"acc_norm": 0.2938775510204082,
|
331 |
-
"acc_norm_stderr": 0.02916273841024977
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.24050632911392406,
|
335 |
-
"acc_stderr": 0.02782078198114968,
|
336 |
-
"acc_norm": 0.24050632911392406,
|
337 |
-
"acc_norm_stderr": 0.02782078198114968
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.23989569752281617,
|
341 |
-
"acc_stderr": 0.010906282617981653,
|
342 |
-
"acc_norm": 0.23989569752281617,
|
343 |
-
"acc_norm_stderr": 0.010906282617981653
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.24019607843137256,
|
347 |
-
"acc_stderr": 0.02998373305591362,
|
348 |
-
"acc_norm": 0.24019607843137256,
|
349 |
-
"acc_norm_stderr": 0.02998373305591362
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.21212121212121213,
|
353 |
-
"acc_stderr": 0.031922715695482995,
|
354 |
-
"acc_norm": 0.21212121212121213,
|
355 |
-
"acc_norm_stderr": 0.031922715695482995
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24479804161566707,
|
359 |
-
"mc1_stderr": 0.01505186948671501,
|
360 |
-
"mc2": 0.4174341547322483,
|
361 |
-
"mc2_stderr": 0.015183101828823979
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.2676056338028169,
|
365 |
-
"acc_stderr": 0.01517593100847281,
|
366 |
-
"acc_norm": 0.3615023474178404,
|
367 |
-
"acc_norm_stderr": 0.01646912149043009
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "BM-K/polyglot-ko-1.3b-it-v1.1",
|
436 |
-
"model_sha": "78f227625af9b7013b69de4ef2a203ac71bdda5b",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.2551194539249147, "acc_stderr": 0.012739038695202109, "acc_norm": 0.30119453924914674, "acc_norm_stderr": 0.01340674176784762},
    "harness|ko_hellaswag|10": {"acc": 0.3379804819757021, "acc_stderr": 0.0047205513235471196, "acc_norm": 0.4176458872734515, "acc_norm_stderr": 0.00492163264510238},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.2807017543859649, "acc_stderr": 0.034462962170884265, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.034462962170884265},
    "harness|ko_mmlu_management|5": {"acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.29118773946360155, "acc_stderr": 0.016246087069701393, "acc_norm": 0.29118773946360155, "acc_norm_stderr": 0.016246087069701393},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.26666666666666666, "acc_stderr": 0.038201699145179055, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.038201699145179055},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.25957446808510637, "acc_stderr": 0.028659179374292326, "acc_norm": 0.25957446808510637, "acc_norm_stderr": 0.028659179374292326},
    "harness|ko_mmlu_virology|5": {"acc": 0.26506024096385544, "acc_stderr": 0.03436024037944967, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944967},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.2540192926045016, "acc_stderr": 0.02472386150477169, "acc_norm": 0.2540192926045016, "acc_norm_stderr": 0.02472386150477169},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.3273542600896861, "acc_stderr": 0.031493846709941306, "acc_norm": 0.3273542600896861, "acc_norm_stderr": 0.031493846709941306},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.24427480916030533, "acc_stderr": 0.037683359597287414, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.037683359597287414},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.24242424242424243, "acc_stderr": 0.030532892233932026, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932026},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.3445378151260504, "acc_stderr": 0.030868682604121626, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121626},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.25384615384615383, "acc_stderr": 0.022066054378726257, "acc_norm": 0.25384615384615383, "acc_norm_stderr": 0.022066054378726257},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.24074074074074073, "acc_stderr": 0.04133119440243838, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243838},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.28078817733990147, "acc_stderr": 0.03161856335358609, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.03161856335358609},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.2903225806451613, "acc_stderr": 0.025822106119415898, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.025822106119415898},
    "harness|ko_mmlu_marketing|5": {"acc": 0.23504273504273504, "acc_stderr": 0.027778835904935437, "acc_norm": 0.23504273504273504, "acc_norm_stderr": 0.027778835904935437},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.2792452830188679, "acc_stderr": 0.027611163402399715, "acc_norm": 0.2792452830188679, "acc_norm_stderr": 0.027611163402399715},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.21818181818181817, "acc_stderr": 0.039559328617958335, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.039559328617958335},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.24074074074074073, "acc_stderr": 0.026067159222275815, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.026067159222275815},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2185430463576159, "acc_stderr": 0.033742355504256936, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.033742355504256936},
    "harness|ko_mmlu_sociology|5": {"acc": 0.208955223880597, "acc_stderr": 0.028748298931728665, "acc_norm": 0.208955223880597, "acc_norm_stderr": 0.028748298931728665},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.2138728323699422, "acc_stderr": 0.031265112061730424, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.031265112061730424},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2774566473988439, "acc_stderr": 0.024105712607754307, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.024105712607754307},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.31901840490797545, "acc_stderr": 0.03661997551073836, "acc_norm": 0.31901840490797545, "acc_norm_stderr": 0.03661997551073836},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.28703703703703703, "acc_stderr": 0.025171041915309684, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.025171041915309684},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.29533678756476683, "acc_stderr": 0.0329229663915514, "acc_norm": 0.29533678756476683, "acc_norm_stderr": 0.0329229663915514},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.2543859649122807, "acc_stderr": 0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843671},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.23853211009174313, "acc_stderr": 0.018272575810231863, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.018272575810231863},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.3492063492063492, "acc_stderr": 0.04263906892795132, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795132},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.2581699346405229, "acc_stderr": 0.025058503316958157, "acc_norm": 0.2581699346405229, "acc_norm_stderr": 0.025058503316958157},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
    "harness|ko_mmlu_international_law|5": {"acc": 0.2231404958677686, "acc_stderr": 0.03800754475228733, "acc_norm": 0.2231404958677686, "acc_norm_stderr": 0.03800754475228733},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.17105263157894737, "acc_stderr": 0.0306436070716771, "acc_norm": 0.17105263157894737, "acc_norm_stderr": 0.0306436070716771},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.26633986928104575, "acc_stderr": 0.01788318813466719, "acc_norm": 0.26633986928104575, "acc_norm_stderr": 0.01788318813466719},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2553191489361702, "acc_stderr": 0.02601199293090201, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02601199293090201},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.20535714285714285, "acc_stderr": 0.038342410214190735, "acc_norm": 0.20535714285714285, "acc_norm_stderr": 0.038342410214190735},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.2361111111111111, "acc_stderr": 0.02896370257079101, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.02896370257079101},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.2670391061452514, "acc_stderr": 0.014796502622562544, "acc_norm": 0.2670391061452514, "acc_norm_stderr": 0.014796502622562544},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.16, "acc_stderr": 0.0368452949177471, "acc_norm": 0.16, "acc_norm_stderr": 0.0368452949177471},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.2536764705882353, "acc_stderr": 0.026431329870789513, "acc_norm": 0.2536764705882353, "acc_norm_stderr": 0.026431329870789513},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.31020408163265306, "acc_stderr": 0.029613459872484378, "acc_norm": 0.31020408163265306, "acc_norm_stderr": 0.029613459872484378},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.23598435462842243, "acc_stderr": 0.010844802669662682, "acc_norm": 0.23598435462842243, "acc_norm_stderr": 0.010844802669662682},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2696078431372549, "acc_stderr": 0.031145570659486782, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.031145570659486782},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2252141982864137, "mc1_stderr": 0.01462324076802348, "mc2": 0.4080616788903193, "mc2_stderr": 0.015242253889585933},
    "harness|ko_commongen_v2|2": {"acc": 0.4107981220657277, "acc_stderr": 0.01686481206490982, "acc_norm": 0.4953051643192488, "acc_norm_stderr": 0.017139023665847616}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "BM-K/polyglot-ko-1.3b-it-v1.2", "model_sha": "d1a6abed1624c40b91b5df3acb5e245e281adc18", "model_dtype": "torch.float16",
    "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
  }
}
BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.2619453924914676, "acc_stderr": 0.012849054826858117, "acc_norm": 0.30802047781569963, "acc_norm_stderr": 0.01349142951729204},
    "harness|ko_hellaswag|10": {"acc": 0.33957379008165706, "acc_stderr": 0.0047259676848064045, "acc_norm": 0.4195379406492731, "acc_norm_stderr": 0.004924748500639348},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.25146198830409355, "acc_stderr": 0.033275044238468436, "acc_norm": 0.25146198830409355, "acc_norm_stderr": 0.033275044238468436},
    "harness|ko_mmlu_management|5": {"acc": 0.2524271844660194, "acc_stderr": 0.04301250399690877, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690877},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.2784163473818646, "acc_stderr": 0.01602829518899247, "acc_norm": 0.2784163473818646, "acc_norm_stderr": 0.01602829518899247},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.25957446808510637, "acc_stderr": 0.028659179374292326, "acc_norm": 0.25957446808510637, "acc_norm_stderr": 0.028659179374292326},
    "harness|ko_mmlu_virology|5": {"acc": 0.3192771084337349, "acc_stderr": 0.036293353299478595, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.036293353299478595},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.2733118971061093, "acc_stderr": 0.02531176597542612, "acc_norm": 0.2733118971061093, "acc_norm_stderr": 0.02531176597542612},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.29596412556053814, "acc_stderr": 0.030636591348699796, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.030636591348699796},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.2676767676767677, "acc_stderr": 0.03154449888270287, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.03154449888270287},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.1724137931034483, "acc_stderr": 0.031478307902595745, "acc_norm": 0.1724137931034483, "acc_norm_stderr": 0.031478307902595745},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.31512605042016806, "acc_stderr": 0.030176808288974337, "acc_norm": 0.31512605042016806, "acc_norm_stderr": 0.030176808288974337},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.2282051282051282, "acc_stderr": 0.021278393863586282, "acc_norm": 0.2282051282051282, "acc_norm_stderr": 0.021278393863586282},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.2315270935960591, "acc_stderr": 0.02967833314144446, "acc_norm": 0.2315270935960591, "acc_norm_stderr": 0.02967833314144446},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.2838709677419355, "acc_stderr": 0.02564938106302925, "acc_norm": 0.2838709677419355, "acc_norm_stderr": 0.02564938106302925},
    "harness|ko_mmlu_marketing|5": {"acc": 0.24786324786324787, "acc_stderr": 0.028286324075564393, "acc_norm": 0.24786324786324787, "acc_norm_stderr": 0.028286324075564393},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.2909090909090909, "acc_stderr": 0.04350271442923243, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.04350271442923243},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.26666666666666666, "acc_stderr": 0.02696242432507383, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02696242432507383},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.23841059602649006, "acc_stderr": 0.034791855725996586, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.034791855725996586},
    "harness|ko_mmlu_sociology|5": {"acc": 0.263681592039801, "acc_stderr": 0.031157150869355568, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.031157150869355568},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.25132275132275134, "acc_stderr": 0.022340482339643898, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643898},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.22916666666666666, "acc_stderr": 0.035146974678623884, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.035146974678623884},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.30057803468208094, "acc_stderr": 0.02468531686725781, "acc_norm": 0.30057803468208094, "acc_norm_stderr": 0.02468531686725781},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.26380368098159507, "acc_stderr": 0.03462419931615624, "acc_norm": 0.26380368098159507, "acc_norm_stderr": 0.03462419931615624},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.2623456790123457, "acc_stderr": 0.0244772228561351, "acc_norm": 0.2623456790123457, "acc_norm_stderr": 0.0244772228561351},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.24352331606217617, "acc_stderr": 0.03097543638684542, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.03097543638684542},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.21100917431192662, "acc_stderr": 0.01749392240411265, "acc_norm": 0.21100917431192662, "acc_norm_stderr": 0.01749392240411265},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.2619047619047619, "acc_stderr": 0.03932537680392871, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.03932537680392871},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445},
    "harness|ko_mmlu_international_law|5": {"acc": 0.35537190082644626, "acc_stderr": 0.04369236326573981, "acc_norm": 0.35537190082644626, "acc_norm_stderr": 0.04369236326573981},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.17105263157894737, "acc_stderr": 0.030643607071677105, "acc_norm": 0.17105263157894737, "acc_norm_stderr": 0.030643607071677105},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.2679738562091503, "acc_stderr": 0.017917974069594722, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.017917974069594722},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2553191489361702, "acc_stderr": 0.02601199293090201, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02601199293090201},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.23214285714285715, "acc_stderr": 0.04007341809755806, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755806},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.2175925925925926, "acc_stderr": 0.02813968944485966, "acc_norm": 0.2175925925925926, "acc_norm_stderr": 0.02813968944485966},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.25921787709497207, "acc_stderr": 0.014655780837497717, "acc_norm": 0.25921787709497207, "acc_norm_stderr": 0.014655780837497717},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.15, "acc_stderr": 0.0358870281282637, "acc_norm": 0.15, "acc_norm_stderr": 0.0358870281282637},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.2867647058823529, "acc_stderr": 0.027472274473233818, "acc_norm": 0.2867647058823529, "acc_norm_stderr": 0.027472274473233818},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.3020408163265306, "acc_stderr": 0.029393609319879818, "acc_norm": 0.3020408163265306, "acc_norm_stderr": 0.029393609319879818},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.25738396624472576, "acc_stderr": 0.028458820991460288, "acc_norm": 0.25738396624472576, "acc_norm_stderr": 0.028458820991460288},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.23728813559322035, "acc_stderr": 0.010865436690780272, "acc_norm": 0.23728813559322035, "acc_norm_stderr": 0.010865436690780272},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2549019607843137, "acc_stderr": 0.03058759135160425, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.03058759135160425},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.2727272727272727, "acc_stderr": 0.0347769116216366, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.0347769116216366},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.4031826036090223, "mc2_stderr": 0.0151985432197755},
    "harness|ko_commongen_v2|2": {"acc": 0.318075117370892, "acc_stderr": 0.015964978456287846, "acc_norm": 0.4154929577464789, "acc_norm_stderr": 0.016893200149530024}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "BM-K/polyglot-ko-1.3b-it-v1.3", "model_sha": "1df1840d994fed4d5806ca38746639407c9bb970", "model_dtype": "torch.float16",
    "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
  }
}
BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.25170648464163825,
-            "acc_stderr": 0.012682496334042963,
-            "acc_norm": 0.30887372013651876,
-            "acc_norm_stderr": 0.013501770929344003
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.34096793467436765,
-            "acc_stderr": 0.004730658073041557,
-            "acc_norm": 0.4206333399721171,
-            "acc_norm_stderr": 0.004926518439372268
-        },
        [... 57 "harness|ko_mmlu_*|5" subtask blocks, each with "acc", "acc_stderr", "acc_norm", "acc_norm_stderr" ...]
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.23623011015911874,
-            "mc1_stderr": 0.014869755015871096,
-            "mc2": 0.414131633910044,
-            "mc2_stderr": 0.015365810716919849
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.24295774647887325,
-            "acc_stderr": 0.01470146638508064,
-            "acc_norm": 0.37089201877934275,
-            "acc_norm_stderr": 0.01655852169248733
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
        [... 57 "harness|ko_mmlu_*|5" entries, each 1 ...]
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "BM-K/polyglot-ko-1.3b-it-v1.4",
-        "model_sha": "acbd40970c01a4b40debc0d9a9ac096a74673d74",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3037542662116041,
-            "acc_stderr": 0.013438909184778759,
-            "acc_norm": 0.3464163822525597,
-            "acc_norm_stderr": 0.013905011180063251
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.350726946823342,
-            "acc_stderr": 0.0047622234924352535,
-            "acc_norm": 0.45429197371041624,
-            "acc_norm_stderr": 0.004968888130290068
-        },
        [... 57 "harness|ko_mmlu_*|5" subtask blocks, each with "acc", "acc_stderr", "acc_norm", "acc_norm_stderr" ...]
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.2631578947368421,
-            "mc1_stderr": 0.015415241740237035,
-            "mc2": 0.42145051773986575,
-            "mc2_stderr": 0.015233960921162444
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.3415492957746479,
-            "acc_stderr": 0.01625636906118511,
-            "acc_norm": 0.42488262910798125,
-            "acc_norm_stderr": 0.016945248826821704
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
        [... 57 "harness|ko_mmlu_*|5" entries, each 1 ...]
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "Chang-Su/llama-2-13b-chat-ko",
-        "model_sha": "3a82a33f61584cbe72dc32c15d55bfd182cefd8b",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.2977815699658703,
-            "acc_stderr": 0.01336308010724449,
-            "acc_norm": 0.3370307167235495,
-            "acc_norm_stderr": 0.013813476652902272
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.38458474407488547,
-            "acc_stderr": 0.004855027248398158,
-            "acc_norm": 0.4970125473013344,
-            "acc_norm_stderr": 0.004989692344313998
-        },
        [... 57 "harness|ko_mmlu_*|5" subtask blocks, each with "acc", "acc_stderr", "acc_norm", "acc_norm_stderr" ...]
-        "harness|ko_truthfulqa_mc|0": {
-            "mc1": 0.2423500611995104,
-            "mc1_stderr": 0.015000674373570342,
-            "mc2": 0.4081734277840062,
-            "mc2_stderr": 0.014989124693241153
-        },
-        "harness|ko_commongen_v2|2": {
-            "acc": 0.2687793427230047,
-            "acc_stderr": 0.015196983421381469,
-            "acc_norm": 0.3380281690140845,
-            "acc_norm_stderr": 0.016215540194273168
-        }
-    },
-    "versions": {
-        "all": 0,
-        "harness|ko_arc_challenge|25": 0,
-        "harness|ko_hellaswag|10": 0,
        [... 57 "harness|ko_mmlu_*|5" entries, each 1 ...]
-        "harness|ko_truthfulqa_mc|0": 0,
-        "harness|ko_commongen_v2|2": 1
-    },
-    "config_general": {
-        "model_name": "DILAB-HYU/KoQuality-Polyglot-5.8b",
-        "model_sha": "3bd0773198883587e1ced9f32a1763da2b64a536",
-        "model_dtype": "torch.float16",
-        "lighteval_sha": "",
-        "num_few_shot_default": 0,
-        "num_fewshot_seeds": 1,
-        "override_batch_size": 1,
-        "max_samples": null
-    }
-}
DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json
DELETED
@@ -1,444 +0,0 @@
-{
-    "results": {
-        "harness|ko_arc_challenge|25": {
-            "acc": 0.3310580204778157,
-            "acc_stderr": 0.013752062419817832,
-            "acc_norm": 0.3916382252559727,
-            "acc_norm_stderr": 0.014264122124938215
-        },
-        "harness|ko_hellaswag|10": {
-            "acc": 0.3873730332603067,
-            "acc_stderr": 0.004861544478451855,
-            "acc_norm": 0.5097590121489743,
-            "acc_norm_stderr": 0.004988830884131634
-        },
        [... "harness|ko_mmlu_*|5" subtask blocks, each with "acc", "acc_stderr", "acc_norm", "acc_norm_stderr" ...]
-        "harness|ko_mmlu_jurisprudence|5": {
"acc": 0.39814814814814814,
|
125 |
-
"acc_stderr": 0.047323326159788126,
|
126 |
-
"acc_norm": 0.39814814814814814,
|
127 |
-
"acc_norm_stderr": 0.047323326159788126
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.2561576354679803,
|
131 |
-
"acc_stderr": 0.0307127300709826,
|
132 |
-
"acc_norm": 0.2561576354679803,
|
133 |
-
"acc_norm_stderr": 0.0307127300709826
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.34516129032258064,
|
137 |
-
"acc_stderr": 0.027045746573534327,
|
138 |
-
"acc_norm": 0.34516129032258064,
|
139 |
-
"acc_norm_stderr": 0.027045746573534327
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.48717948717948717,
|
143 |
-
"acc_stderr": 0.032745319388423504,
|
144 |
-
"acc_norm": 0.48717948717948717,
|
145 |
-
"acc_norm_stderr": 0.032745319388423504
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.33584905660377357,
|
149 |
-
"acc_stderr": 0.029067220146644823,
|
150 |
-
"acc_norm": 0.33584905660377357,
|
151 |
-
"acc_norm_stderr": 0.029067220146644823
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.41818181818181815,
|
155 |
-
"acc_stderr": 0.0472457740573157,
|
156 |
-
"acc_norm": 0.41818181818181815,
|
157 |
-
"acc_norm_stderr": 0.0472457740573157
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.25555555555555554,
|
161 |
-
"acc_stderr": 0.02659393910184408,
|
162 |
-
"acc_norm": 0.25555555555555554,
|
163 |
-
"acc_norm_stderr": 0.02659393910184408
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2781456953642384,
|
167 |
-
"acc_stderr": 0.03658603262763743,
|
168 |
-
"acc_norm": 0.2781456953642384,
|
169 |
-
"acc_norm_stderr": 0.03658603262763743
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.46766169154228854,
|
173 |
-
"acc_stderr": 0.035281314729336065,
|
174 |
-
"acc_norm": 0.46766169154228854,
|
175 |
-
"acc_norm_stderr": 0.035281314729336065
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.32947976878612717,
|
179 |
-
"acc_stderr": 0.03583901754736411,
|
180 |
-
"acc_norm": 0.32947976878612717,
|
181 |
-
"acc_norm_stderr": 0.03583901754736411
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.25396825396825395,
|
185 |
-
"acc_stderr": 0.022418042891113942,
|
186 |
-
"acc_norm": 0.25396825396825395,
|
187 |
-
"acc_norm_stderr": 0.022418042891113942
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3333333333333333,
|
191 |
-
"acc_stderr": 0.039420826399272135,
|
192 |
-
"acc_norm": 0.3333333333333333,
|
193 |
-
"acc_norm_stderr": 0.039420826399272135
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.042923469599092816,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.042923469599092816
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.4,
|
203 |
-
"acc_stderr": 0.049236596391733084,
|
204 |
-
"acc_norm": 0.4,
|
205 |
-
"acc_norm_stderr": 0.049236596391733084
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.407514450867052,
|
209 |
-
"acc_stderr": 0.026454578146931505,
|
210 |
-
"acc_norm": 0.407514450867052,
|
211 |
-
"acc_norm_stderr": 0.026454578146931505
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.32515337423312884,
|
215 |
-
"acc_stderr": 0.03680350371286461,
|
216 |
-
"acc_norm": 0.32515337423312884,
|
217 |
-
"acc_norm_stderr": 0.03680350371286461
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.39197530864197533,
|
221 |
-
"acc_stderr": 0.02716368603827123,
|
222 |
-
"acc_norm": 0.39197530864197533,
|
223 |
-
"acc_norm_stderr": 0.02716368603827123
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.4,
|
227 |
-
"acc_stderr": 0.04923659639173309,
|
228 |
-
"acc_norm": 0.4,
|
229 |
-
"acc_norm_stderr": 0.04923659639173309
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.41450777202072536,
|
233 |
-
"acc_stderr": 0.03555300319557672,
|
234 |
-
"acc_norm": 0.41450777202072536,
|
235 |
-
"acc_norm_stderr": 0.03555300319557672
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2631578947368421,
|
239 |
-
"acc_stderr": 0.04142439719489362,
|
240 |
-
"acc_norm": 0.2631578947368421,
|
241 |
-
"acc_norm_stderr": 0.04142439719489362
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.42018348623853213,
|
245 |
-
"acc_stderr": 0.021162420048273508,
|
246 |
-
"acc_norm": 0.42018348623853213,
|
247 |
-
"acc_norm_stderr": 0.021162420048273508
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.24603174603174602,
|
251 |
-
"acc_stderr": 0.03852273364924315,
|
252 |
-
"acc_norm": 0.24603174603174602,
|
253 |
-
"acc_norm_stderr": 0.03852273364924315
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.41830065359477125,
|
257 |
-
"acc_stderr": 0.02824513402438729,
|
258 |
-
"acc_norm": 0.41830065359477125,
|
259 |
-
"acc_norm_stderr": 0.02824513402438729
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.32,
|
263 |
-
"acc_stderr": 0.046882617226215034,
|
264 |
-
"acc_norm": 0.32,
|
265 |
-
"acc_norm_stderr": 0.046882617226215034
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5289256198347108,
|
269 |
-
"acc_stderr": 0.04556710331269498,
|
270 |
-
"acc_norm": 0.5289256198347108,
|
271 |
-
"acc_norm_stderr": 0.04556710331269498
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.3026315789473684,
|
275 |
-
"acc_stderr": 0.03738520676119667,
|
276 |
-
"acc_norm": 0.3026315789473684,
|
277 |
-
"acc_norm_stderr": 0.03738520676119667
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.31699346405228757,
|
281 |
-
"acc_stderr": 0.018824219512706214,
|
282 |
-
"acc_norm": 0.31699346405228757,
|
283 |
-
"acc_norm_stderr": 0.018824219512706214
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2872340425531915,
|
287 |
-
"acc_stderr": 0.026992199173064356,
|
288 |
-
"acc_norm": 0.2872340425531915,
|
289 |
-
"acc_norm_stderr": 0.026992199173064356
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25892857142857145,
|
293 |
-
"acc_stderr": 0.04157751539865629,
|
294 |
-
"acc_norm": 0.25892857142857145,
|
295 |
-
"acc_norm_stderr": 0.04157751539865629
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3287037037037037,
|
299 |
-
"acc_stderr": 0.03203614084670058,
|
300 |
-
"acc_norm": 0.3287037037037037,
|
301 |
-
"acc_norm_stderr": 0.03203614084670058
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.25139664804469275,
|
305 |
-
"acc_stderr": 0.014508979453553977,
|
306 |
-
"acc_norm": 0.25139664804469275,
|
307 |
-
"acc_norm_stderr": 0.014508979453553977
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.3,
|
311 |
-
"acc_stderr": 0.046056618647183814,
|
312 |
-
"acc_norm": 0.3,
|
313 |
-
"acc_norm_stderr": 0.046056618647183814
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.26,
|
317 |
-
"acc_stderr": 0.044084400227680794,
|
318 |
-
"acc_norm": 0.26,
|
319 |
-
"acc_norm_stderr": 0.044084400227680794
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.4227941176470588,
|
323 |
-
"acc_stderr": 0.030008562845003476,
|
324 |
-
"acc_norm": 0.4227941176470588,
|
325 |
-
"acc_norm_stderr": 0.030008562845003476
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.3877551020408163,
|
329 |
-
"acc_stderr": 0.031192230726795656,
|
330 |
-
"acc_norm": 0.3877551020408163,
|
331 |
-
"acc_norm_stderr": 0.031192230726795656
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5147679324894515,
|
335 |
-
"acc_stderr": 0.032533028078777386,
|
336 |
-
"acc_norm": 0.5147679324894515,
|
337 |
-
"acc_norm_stderr": 0.032533028078777386
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3305084745762712,
|
341 |
-
"acc_stderr": 0.01201414210184297,
|
342 |
-
"acc_norm": 0.3305084745762712,
|
343 |
-
"acc_norm_stderr": 0.01201414210184297
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.3627450980392157,
|
347 |
-
"acc_stderr": 0.033744993563193555,
|
348 |
-
"acc_norm": 0.3627450980392157,
|
349 |
-
"acc_norm_stderr": 0.033744993563193555
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.37575757575757573,
|
353 |
-
"acc_stderr": 0.03781887353205982,
|
354 |
-
"acc_norm": 0.37575757575757573,
|
355 |
-
"acc_norm_stderr": 0.03781887353205982
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.23990208078335373,
|
359 |
-
"mc1_stderr": 0.01494881267906214,
|
360 |
-
"mc2": 0.3781293727977648,
|
361 |
-
"mc2_stderr": 0.014917319628125631
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5610328638497653,
|
365 |
-
"acc_stderr": 0.017011608310486023,
|
366 |
-
"acc_norm": 0.6490610328638498,
|
367 |
-
"acc_norm_stderr": 0.016360395003030395
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "DopeorNope/COLA3-7B",
|
436 |
-
"model_sha": "831fc99b2b9d86ad17129c419953502f2d4f8da7",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.36860068259385664,
|
5 |
-
"acc_stderr": 0.014097810678042192,
|
6 |
-
"acc_norm": 0.42235494880546076,
|
7 |
-
"acc_norm_stderr": 0.014434138713379981
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.4082852021509659,
|
11 |
-
"acc_stderr": 0.004905119039849461,
|
12 |
-
"acc_norm": 0.5435172276438957,
|
13 |
-
"acc_norm_stderr": 0.004970846697552308
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.5555555555555556,
|
17 |
-
"acc_stderr": 0.03811079669833531,
|
18 |
-
"acc_norm": 0.5555555555555556,
|
19 |
-
"acc_norm_stderr": 0.03811079669833531
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.4563106796116505,
|
23 |
-
"acc_stderr": 0.049318019942204146,
|
24 |
-
"acc_norm": 0.4563106796116505,
|
25 |
-
"acc_norm_stderr": 0.049318019942204146
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.5057471264367817,
|
29 |
-
"acc_stderr": 0.017878782326129224,
|
30 |
-
"acc_norm": 0.5057471264367817,
|
31 |
-
"acc_norm_stderr": 0.017878782326129224
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.4148148148148148,
|
35 |
-
"acc_stderr": 0.04256193767901407,
|
36 |
-
"acc_norm": 0.4148148148148148,
|
37 |
-
"acc_norm_stderr": 0.04256193767901407
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.29,
|
41 |
-
"acc_stderr": 0.045604802157206824,
|
42 |
-
"acc_norm": 0.29,
|
43 |
-
"acc_norm_stderr": 0.045604802157206824
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.33191489361702126,
|
47 |
-
"acc_stderr": 0.03078373675774564,
|
48 |
-
"acc_norm": 0.33191489361702126,
|
49 |
-
"acc_norm_stderr": 0.03078373675774564
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.4397590361445783,
|
53 |
-
"acc_stderr": 0.03864139923699122,
|
54 |
-
"acc_norm": 0.4397590361445783,
|
55 |
-
"acc_norm_stderr": 0.03864139923699122
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.5080385852090032,
|
59 |
-
"acc_stderr": 0.028394421370984538,
|
60 |
-
"acc_norm": 0.5080385852090032,
|
61 |
-
"acc_norm_stderr": 0.028394421370984538
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.37668161434977576,
|
65 |
-
"acc_stderr": 0.03252113489929187,
|
66 |
-
"acc_norm": 0.37668161434977576,
|
67 |
-
"acc_norm_stderr": 0.03252113489929187
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.48854961832061067,
|
71 |
-
"acc_stderr": 0.04384140024078016,
|
72 |
-
"acc_norm": 0.48854961832061067,
|
73 |
-
"acc_norm_stderr": 0.04384140024078016
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.36,
|
77 |
-
"acc_stderr": 0.04824181513244218,
|
78 |
-
"acc_norm": 0.36,
|
79 |
-
"acc_norm_stderr": 0.04824181513244218
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.5505050505050505,
|
83 |
-
"acc_stderr": 0.035441324919479704,
|
84 |
-
"acc_norm": 0.5505050505050505,
|
85 |
-
"acc_norm_stderr": 0.035441324919479704
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.45517241379310347,
|
89 |
-
"acc_stderr": 0.04149886942192117,
|
90 |
-
"acc_norm": 0.45517241379310347,
|
91 |
-
"acc_norm_stderr": 0.04149886942192117
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.19607843137254902,
|
95 |
-
"acc_stderr": 0.03950581861179961,
|
96 |
-
"acc_norm": 0.19607843137254902,
|
97 |
-
"acc_norm_stderr": 0.03950581861179961
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.4369747899159664,
|
101 |
-
"acc_stderr": 0.03221943636566197,
|
102 |
-
"acc_norm": 0.4369747899159664,
|
103 |
-
"acc_norm_stderr": 0.03221943636566197
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.3923076923076923,
|
107 |
-
"acc_stderr": 0.02475600038213094,
|
108 |
-
"acc_norm": 0.3923076923076923,
|
109 |
-
"acc_norm_stderr": 0.02475600038213094
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.46,
|
113 |
-
"acc_stderr": 0.05009082659620333,
|
114 |
-
"acc_norm": 0.46,
|
115 |
-
"acc_norm_stderr": 0.05009082659620333
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.36,
|
119 |
-
"acc_stderr": 0.048241815132442176,
|
120 |
-
"acc_norm": 0.36,
|
121 |
-
"acc_norm_stderr": 0.048241815132442176
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.4722222222222222,
|
125 |
-
"acc_stderr": 0.04826217294139894,
|
126 |
-
"acc_norm": 0.4722222222222222,
|
127 |
-
"acc_norm_stderr": 0.04826217294139894
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.4187192118226601,
|
131 |
-
"acc_stderr": 0.03471192860518468,
|
132 |
-
"acc_norm": 0.4187192118226601,
|
133 |
-
"acc_norm_stderr": 0.03471192860518468
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.432258064516129,
|
137 |
-
"acc_stderr": 0.028181739720019416,
|
138 |
-
"acc_norm": 0.432258064516129,
|
139 |
-
"acc_norm_stderr": 0.028181739720019416
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.6495726495726496,
|
143 |
-
"acc_stderr": 0.0312561082442188,
|
144 |
-
"acc_norm": 0.6495726495726496,
|
145 |
-
"acc_norm_stderr": 0.0312561082442188
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.4679245283018868,
|
149 |
-
"acc_stderr": 0.030709486992556545,
|
150 |
-
"acc_norm": 0.4679245283018868,
|
151 |
-
"acc_norm_stderr": 0.030709486992556545
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.44545454545454544,
|
155 |
-
"acc_stderr": 0.04760548821460325,
|
156 |
-
"acc_norm": 0.44545454545454544,
|
157 |
-
"acc_norm_stderr": 0.04760548821460325
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.2518518518518518,
|
161 |
-
"acc_stderr": 0.026466117538959916,
|
162 |
-
"acc_norm": 0.2518518518518518,
|
163 |
-
"acc_norm_stderr": 0.026466117538959916
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2781456953642384,
|
167 |
-
"acc_stderr": 0.03658603262763743,
|
168 |
-
"acc_norm": 0.2781456953642384,
|
169 |
-
"acc_norm_stderr": 0.03658603262763743
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.5572139303482587,
|
173 |
-
"acc_stderr": 0.03512310964123937,
|
174 |
-
"acc_norm": 0.5572139303482587,
|
175 |
-
"acc_norm_stderr": 0.03512310964123937
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3699421965317919,
|
179 |
-
"acc_stderr": 0.03681229633394319,
|
180 |
-
"acc_norm": 0.3699421965317919,
|
181 |
-
"acc_norm_stderr": 0.03681229633394319
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2671957671957672,
|
185 |
-
"acc_stderr": 0.022789673145776575,
|
186 |
-
"acc_norm": 0.2671957671957672,
|
187 |
-
"acc_norm_stderr": 0.022789673145776575
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3472222222222222,
|
191 |
-
"acc_stderr": 0.039812405437178615,
|
192 |
-
"acc_norm": 0.3472222222222222,
|
193 |
-
"acc_norm_stderr": 0.039812405437178615
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.33,
|
197 |
-
"acc_stderr": 0.047258156262526045,
|
198 |
-
"acc_norm": 0.33,
|
199 |
-
"acc_norm_stderr": 0.047258156262526045
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.57,
|
203 |
-
"acc_stderr": 0.04975698519562428,
|
204 |
-
"acc_norm": 0.57,
|
205 |
-
"acc_norm_stderr": 0.04975698519562428
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.4508670520231214,
|
209 |
-
"acc_stderr": 0.026788811931562757,
|
210 |
-
"acc_norm": 0.4508670520231214,
|
211 |
-
"acc_norm_stderr": 0.026788811931562757
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.4110429447852761,
|
215 |
-
"acc_stderr": 0.038656978537853624,
|
216 |
-
"acc_norm": 0.4110429447852761,
|
217 |
-
"acc_norm_stderr": 0.038656978537853624
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.46296296296296297,
|
221 |
-
"acc_stderr": 0.02774431344337654,
|
222 |
-
"acc_norm": 0.46296296296296297,
|
223 |
-
"acc_norm_stderr": 0.02774431344337654
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.3,
|
227 |
-
"acc_stderr": 0.046056618647183814,
|
228 |
-
"acc_norm": 0.3,
|
229 |
-
"acc_norm_stderr": 0.046056618647183814
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.43005181347150256,
|
233 |
-
"acc_stderr": 0.03572954333144807,
|
234 |
-
"acc_norm": 0.43005181347150256,
|
235 |
-
"acc_norm_stderr": 0.03572954333144807
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2631578947368421,
|
239 |
-
"acc_stderr": 0.04142439719489361,
|
240 |
-
"acc_norm": 0.2631578947368421,
|
241 |
-
"acc_norm_stderr": 0.04142439719489361
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.48256880733944957,
|
245 |
-
"acc_stderr": 0.021424291871853147,
|
246 |
-
"acc_norm": 0.48256880733944957,
|
247 |
-
"acc_norm_stderr": 0.021424291871853147
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.25396825396825395,
|
251 |
-
"acc_stderr": 0.03893259610604674,
|
252 |
-
"acc_norm": 0.25396825396825395,
|
253 |
-
"acc_norm_stderr": 0.03893259610604674
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.42483660130718953,
|
257 |
-
"acc_stderr": 0.02830457667314112,
|
258 |
-
"acc_norm": 0.42483660130718953,
|
259 |
-
"acc_norm_stderr": 0.02830457667314112
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.37,
|
263 |
-
"acc_stderr": 0.04852365870939098,
|
264 |
-
"acc_norm": 0.37,
|
265 |
-
"acc_norm_stderr": 0.04852365870939098
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5619834710743802,
|
269 |
-
"acc_stderr": 0.045291468044357915,
|
270 |
-
"acc_norm": 0.5619834710743802,
|
271 |
-
"acc_norm_stderr": 0.045291468044357915
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.40789473684210525,
|
275 |
-
"acc_stderr": 0.039993097127774734,
|
276 |
-
"acc_norm": 0.40789473684210525,
|
277 |
-
"acc_norm_stderr": 0.039993097127774734
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.32189542483660133,
|
281 |
-
"acc_stderr": 0.018901015322093085,
|
282 |
-
"acc_norm": 0.32189542483660133,
|
283 |
-
"acc_norm_stderr": 0.018901015322093085
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3120567375886525,
|
287 |
-
"acc_stderr": 0.02764012054516994,
|
288 |
-
"acc_norm": 0.3120567375886525,
|
289 |
-
"acc_norm_stderr": 0.02764012054516994
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.21428571428571427,
|
293 |
-
"acc_stderr": 0.038946411200447915,
|
294 |
-
"acc_norm": 0.21428571428571427,
|
295 |
-
"acc_norm_stderr": 0.038946411200447915
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3194444444444444,
|
299 |
-
"acc_stderr": 0.03179876342176851,
|
300 |
-
"acc_norm": 0.3194444444444444,
|
301 |
-
"acc_norm_stderr": 0.03179876342176851
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.2424581005586592,
|
305 |
-
"acc_stderr": 0.01433352205921789,
|
306 |
-
"acc_norm": 0.2424581005586592,
|
307 |
-
"acc_norm_stderr": 0.01433352205921789
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.3,
|
311 |
-
"acc_stderr": 0.046056618647183814,
|
312 |
-
"acc_norm": 0.3,
|
313 |
-
"acc_norm_stderr": 0.046056618647183814
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.43,
|
317 |
-
"acc_stderr": 0.049756985195624284,
|
318 |
-
"acc_norm": 0.43,
|
319 |
-
"acc_norm_stderr": 0.049756985195624284
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.2426470588235294,
|
323 |
-
"acc_stderr": 0.026040662474201285,
|
324 |
-
"acc_norm": 0.2426470588235294,
|
325 |
-
"acc_norm_stderr": 0.026040662474201285
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.46938775510204084,
|
329 |
-
"acc_stderr": 0.031949171367580624,
|
330 |
-
"acc_norm": 0.46938775510204084,
|
331 |
-
"acc_norm_stderr": 0.031949171367580624
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5063291139240507,
|
335 |
-
"acc_stderr": 0.03254462010767859,
|
336 |
-
"acc_norm": 0.5063291139240507,
|
337 |
-
"acc_norm_stderr": 0.03254462010767859
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.30247718383311606,
|
341 |
-
"acc_stderr": 0.0117315242341657,
|
342 |
-
"acc_norm": 0.30247718383311606,
|
343 |
-
"acc_norm_stderr": 0.0117315242341657
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.38235294117647056,
|
347 |
-
"acc_stderr": 0.03410785338904719,
|
348 |
-
"acc_norm": 0.38235294117647056,
|
349 |
-
"acc_norm_stderr": 0.03410785338904719
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4727272727272727,
|
353 |
-
"acc_stderr": 0.03898531605579419,
|
354 |
-
"acc_norm": 0.4727272727272727,
|
355 |
-
"acc_norm_stderr": 0.03898531605579419
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2533659730722154,
|
359 |
-
"mc1_stderr": 0.015225899340826824,
|
360 |
-
"mc2": 0.40933802446057865,
|
361 |
-
"mc2_stderr": 0.014937193336867839
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.3720657276995305,
|
365 |
-
"acc_stderr": 0.016569223163823556,
|
366 |
-
"acc_norm": 0.4835680751173709,
|
367 |
-
"acc_norm_stderr": 0.017130520993936017
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "DopeorNope/COLA3_13B",
|
436 |
-
"model_sha": "7725e7a1c6f8f022c7c4ec0286dd9f7fada126bd",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.3319112627986348,
|
5 |
-
"acc_stderr": 0.013760988200880533,
|
6 |
-
"acc_norm": 0.38993174061433444,
|
7 |
-
"acc_norm_stderr": 0.014252959848892884
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3857797251543517,
|
11 |
-
"acc_stderr": 0.004857840934549158,
|
12 |
-
"acc_norm": 0.5046803425612428,
|
13 |
-
"acc_norm_stderr": 0.004989562798280523
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.38011695906432746,
|
17 |
-
"acc_stderr": 0.037229657413855394,
|
18 |
-
"acc_norm": 0.38011695906432746,
|
19 |
-
"acc_norm_stderr": 0.037229657413855394
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.33980582524271846,
|
23 |
-
"acc_stderr": 0.04689765937278134,
|
24 |
-
"acc_norm": 0.33980582524271846,
|
25 |
-
"acc_norm_stderr": 0.04689765937278134
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.41762452107279696,
|
29 |
-
"acc_stderr": 0.01763563732695152,
|
30 |
-
"acc_norm": 0.41762452107279696,
|
31 |
-
"acc_norm_stderr": 0.01763563732695152
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.37037037037037035,
|
35 |
-
"acc_stderr": 0.04171654161354544,
|
36 |
-
"acc_norm": 0.37037037037037035,
|
37 |
-
"acc_norm_stderr": 0.04171654161354544
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.33,
|
41 |
-
"acc_stderr": 0.047258156262526045,
|
42 |
-
"acc_norm": 0.33,
|
43 |
-
"acc_norm_stderr": 0.047258156262526045
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2936170212765957,
|
47 |
-
"acc_stderr": 0.02977164271249123,
|
48 |
-
"acc_norm": 0.2936170212765957,
|
49 |
-
"acc_norm_stderr": 0.02977164271249123
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.35542168674698793,
|
53 |
-
"acc_stderr": 0.03726214354322415,
|
54 |
-
"acc_norm": 0.35542168674698793,
|
55 |
-
"acc_norm_stderr": 0.03726214354322415
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.43086816720257237,
|
59 |
-
"acc_stderr": 0.028125340983972714,
|
60 |
-
"acc_norm": 0.43086816720257237,
|
61 |
-
"acc_norm_stderr": 0.028125340983972714
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.42152466367713004,
|
65 |
-
"acc_stderr": 0.03314190222110658,
|
66 |
-
"acc_norm": 0.42152466367713004,
|
67 |
-
"acc_norm_stderr": 0.03314190222110658
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.45038167938931295,
|
71 |
-
"acc_stderr": 0.04363643698524779,
|
72 |
-
"acc_norm": 0.45038167938931295,
|
73 |
-
"acc_norm_stderr": 0.04363643698524779
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.37,
|
77 |
-
"acc_stderr": 0.04852365870939099,
|
78 |
-
"acc_norm": 0.37,
|
79 |
-
"acc_norm_stderr": 0.04852365870939099
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.41414141414141414,
|
83 |
-
"acc_stderr": 0.03509438348879629,
|
84 |
-
"acc_norm": 0.41414141414141414,
|
85 |
-
"acc_norm_stderr": 0.03509438348879629
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.3724137931034483,
|
89 |
-
"acc_stderr": 0.0402873153294756,
|
90 |
-
"acc_norm": 0.3724137931034483,
|
91 |
-
"acc_norm_stderr": 0.0402873153294756
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.21568627450980393,
|
95 |
-
"acc_stderr": 0.04092563958237655,
|
96 |
-
"acc_norm": 0.21568627450980393,
|
97 |
-
"acc_norm_stderr": 0.04092563958237655
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3697478991596639,
|
101 |
-
"acc_stderr": 0.031357095996135904,
|
102 |
-
"acc_norm": 0.3697478991596639,
|
103 |
-
"acc_norm_stderr": 0.031357095996135904
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.26153846153846155,
|
107 |
-
"acc_stderr": 0.022282141204204433,
|
108 |
-
"acc_norm": 0.26153846153846155,
|
109 |
-
"acc_norm_stderr": 0.022282141204204433
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.41,
|
113 |
-
"acc_stderr": 0.04943110704237102,
|
114 |
-
"acc_norm": 0.41,
|
115 |
-
"acc_norm_stderr": 0.04943110704237102
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.3,
|
119 |
-
"acc_stderr": 0.046056618647183814,
|
120 |
-
"acc_norm": 0.3,
|
121 |
-
"acc_norm_stderr": 0.046056618647183814
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.37962962962962965,
|
125 |
-
"acc_stderr": 0.04691521224077742,
|
126 |
-
"acc_norm": 0.37962962962962965,
|
127 |
-
"acc_norm_stderr": 0.04691521224077742
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.2315270935960591,
|
131 |
-
"acc_stderr": 0.02967833314144444,
|
132 |
-
"acc_norm": 0.2315270935960591,
|
133 |
-
"acc_norm_stderr": 0.02967833314144444
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.36774193548387096,
|
137 |
-
"acc_stderr": 0.02743086657997347,
|
138 |
-
"acc_norm": 0.36774193548387096,
|
139 |
-
"acc_norm_stderr": 0.02743086657997347
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.5042735042735043,
|
143 |
-
"acc_stderr": 0.03275489264382132,
|
144 |
-
"acc_norm": 0.5042735042735043,
|
145 |
-
"acc_norm_stderr": 0.03275489264382132
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.33962264150943394,
|
149 |
-
"acc_stderr": 0.029146904747798352,
|
150 |
-
"acc_norm": 0.33962264150943394,
|
151 |
-
"acc_norm_stderr": 0.029146904747798352
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.37272727272727274,
|
155 |
-
"acc_stderr": 0.046313813194254635,
|
156 |
-
"acc_norm": 0.37272727272727274,
|
157 |
-
"acc_norm_stderr": 0.046313813194254635
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.24814814814814815,
|
161 |
-
"acc_stderr": 0.0263357394040558,
|
162 |
-
"acc_norm": 0.24814814814814815,
|
163 |
-
"acc_norm_stderr": 0.0263357394040558
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.23178807947019867,
|
167 |
-
"acc_stderr": 0.03445406271987054,
|
168 |
-
"acc_norm": 0.23178807947019867,
|
169 |
-
"acc_norm_stderr": 0.03445406271987054
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.43283582089552236,
|
173 |
-
"acc_stderr": 0.0350349092367328,
|
174 |
-
"acc_norm": 0.43283582089552236,
|
175 |
-
"acc_norm_stderr": 0.0350349092367328
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.30057803468208094,
|
179 |
-
"acc_stderr": 0.03496101481191181,
|
180 |
-
"acc_norm": 0.30057803468208094,
|
181 |
-
"acc_norm_stderr": 0.03496101481191181
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2328042328042328,
|
185 |
-
"acc_stderr": 0.021765961672154537,
|
186 |
-
"acc_norm": 0.2328042328042328,
|
187 |
-
"acc_norm_stderr": 0.021765961672154537
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3333333333333333,
|
191 |
-
"acc_stderr": 0.039420826399272135,
|
192 |
-
"acc_norm": 0.3333333333333333,
|
193 |
-
"acc_norm_stderr": 0.039420826399272135
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.042923469599092816,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.042923469599092816
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.4,
|
203 |
-
"acc_stderr": 0.049236596391733084,
|
204 |
-
"acc_norm": 0.4,
|
205 |
-
"acc_norm_stderr": 0.049236596391733084
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.45664739884393063,
|
209 |
-
"acc_stderr": 0.02681771813034892,
|
210 |
-
"acc_norm": 0.45664739884393063,
|
211 |
-
"acc_norm_stderr": 0.02681771813034892
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3312883435582822,
|
215 |
-
"acc_stderr": 0.03697983910025588,
|
216 |
-
"acc_norm": 0.3312883435582822,
|
217 |
-
"acc_norm_stderr": 0.03697983910025588
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.39197530864197533,
|
221 |
-
"acc_stderr": 0.02716368603827123,
|
222 |
-
"acc_norm": 0.39197530864197533,
|
223 |
-
"acc_norm_stderr": 0.02716368603827123
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.33,
|
227 |
-
"acc_stderr": 0.04725815626252605,
|
228 |
-
"acc_norm": 0.33,
|
229 |
-
"acc_norm_stderr": 0.04725815626252605
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.44041450777202074,
|
233 |
-
"acc_stderr": 0.035827245300360945,
|
234 |
-
"acc_norm": 0.44041450777202074,
|
235 |
-
"acc_norm_stderr": 0.035827245300360945
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2543859649122807,
|
239 |
-
"acc_stderr": 0.04096985139843671,
|
240 |
-
"acc_norm": 0.2543859649122807,
|
241 |
-
"acc_norm_stderr": 0.04096985139843671
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3798165137614679,
|
245 |
-
"acc_stderr": 0.020808825617866244,
|
246 |
-
"acc_norm": 0.3798165137614679,
|
247 |
-
"acc_norm_stderr": 0.020808825617866244
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.24603174603174602,
|
251 |
-
"acc_stderr": 0.03852273364924318,
|
252 |
-
"acc_norm": 0.24603174603174602,
|
253 |
-
"acc_norm_stderr": 0.03852273364924318
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.4117647058823529,
|
257 |
-
"acc_stderr": 0.02818059632825929,
|
258 |
-
"acc_norm": 0.4117647058823529,
|
259 |
-
"acc_norm_stderr": 0.02818059632825929
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.34,
|
263 |
-
"acc_stderr": 0.04760952285695235,
|
264 |
-
"acc_norm": 0.34,
|
265 |
-
"acc_norm_stderr": 0.04760952285695235
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.48760330578512395,
|
269 |
-
"acc_stderr": 0.04562951548180765,
|
270 |
-
"acc_norm": 0.48760330578512395,
|
271 |
-
"acc_norm_stderr": 0.04562951548180765
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.3026315789473684,
|
275 |
-
"acc_stderr": 0.03738520676119667,
|
276 |
-
"acc_norm": 0.3026315789473684,
|
277 |
-
"acc_norm_stderr": 0.03738520676119667
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.32189542483660133,
|
281 |
-
"acc_stderr": 0.018901015322093095,
|
282 |
-
"acc_norm": 0.32189542483660133,
|
283 |
-
"acc_norm_stderr": 0.018901015322093095
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3191489361702128,
|
287 |
-
"acc_stderr": 0.027807990141320203,
|
288 |
-
"acc_norm": 0.3191489361702128,
|
289 |
-
"acc_norm_stderr": 0.027807990141320203
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.2767857142857143,
|
293 |
-
"acc_stderr": 0.042466243366976256,
|
294 |
-
"acc_norm": 0.2767857142857143,
|
295 |
-
"acc_norm_stderr": 0.042466243366976256
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.35185185185185186,
|
299 |
-
"acc_stderr": 0.032568505702936464,
|
300 |
-
"acc_norm": 0.35185185185185186,
|
301 |
-
"acc_norm_stderr": 0.032568505702936464
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24134078212290502,
|
305 |
-
"acc_stderr": 0.014310999547961443,
|
306 |
-
"acc_norm": 0.24134078212290502,
|
307 |
-
"acc_norm_stderr": 0.014310999547961443
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.28,
|
311 |
-
"acc_stderr": 0.045126085985421276,
|
312 |
-
"acc_norm": 0.28,
|
313 |
-
"acc_norm_stderr": 0.045126085985421276
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.28,
|
317 |
-
"acc_stderr": 0.04512608598542127,
|
318 |
-
"acc_norm": 0.28,
|
319 |
-
"acc_norm_stderr": 0.04512608598542127
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.4264705882352941,
|
323 |
-
"acc_stderr": 0.03004261583271486,
|
324 |
-
"acc_norm": 0.4264705882352941,
|
325 |
-
"acc_norm_stderr": 0.03004261583271486
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.39591836734693875,
|
329 |
-
"acc_stderr": 0.03130802899065686,
|
330 |
-
"acc_norm": 0.39591836734693875,
|
331 |
-
"acc_norm_stderr": 0.03130802899065686
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.42616033755274263,
|
335 |
-
"acc_stderr": 0.03219035703131774,
|
336 |
-
"acc_norm": 0.42616033755274263,
|
337 |
-
"acc_norm_stderr": 0.03219035703131774
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.33376792698826596,
|
341 |
-
"acc_stderr": 0.012043812655846146,
|
342 |
-
"acc_norm": 0.33376792698826596,
|
343 |
-
"acc_norm_stderr": 0.012043812655846146
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.3627450980392157,
|
347 |
-
"acc_stderr": 0.033744993563193555,
|
348 |
-
"acc_norm": 0.3627450980392157,
|
349 |
-
"acc_norm_stderr": 0.033744993563193555
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3575757575757576,
|
353 |
-
"acc_stderr": 0.03742597043806587,
|
354 |
-
"acc_norm": 0.3575757575757576,
|
355 |
-
"acc_norm_stderr": 0.03742597043806587
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2484700122399021,
|
359 |
-
"mc1_stderr": 0.015127427096520688,
|
360 |
-
"mc2": 0.3821911392219441,
|
361 |
-
"mc2_stderr": 0.014928316371274168
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5692488262910798,
|
365 |
-
"acc_stderr": 0.01697459912173145,
|
366 |
-
"acc_norm": 0.6326291079812206,
|
367 |
-
"acc_norm_stderr": 0.016525787977931604
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "DopeorNope/COLA_LO-7B",
|
436 |
-
"model_sha": "4cccb5249ea36f58588c32fe58c6f104f89f0487",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.25597269624573377, "acc_stderr": 0.012753013241244513, "acc_norm": 0.30716723549488056, "acc_norm_stderr": 0.013481034054980945 },
-    "harness|ko_hellaswag|10": { "acc": 0.3408683529177455, "acc_stderr": 0.00473032455662415, "acc_norm": 0.4153555068711412, "acc_norm_stderr": 0.004917761181740164 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.25146198830409355, "acc_stderr": 0.033275044238468436, "acc_norm": 0.25146198830409355, "acc_norm_stderr": 0.033275044238468436 },
-    "harness|ko_mmlu_management|5": { "acc": 0.23300970873786409, "acc_stderr": 0.041858325989283136, "acc_norm": 0.23300970873786409, "acc_norm_stderr": 0.041858325989283136 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.21966794380587484, "acc_stderr": 0.01480538447837116, "acc_norm": 0.21966794380587484, "acc_norm_stderr": 0.01480538447837116 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.24444444444444444, "acc_stderr": 0.03712537833614866, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.03712537833614866 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.22127659574468084, "acc_stderr": 0.027136349602424063, "acc_norm": 0.22127659574468084, "acc_norm_stderr": 0.027136349602424063 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.26506024096385544, "acc_stderr": 0.03436024037944968, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944968 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.24758842443729903, "acc_stderr": 0.024513879973621967, "acc_norm": 0.24758842443729903, "acc_norm_stderr": 0.024513879973621967 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.25112107623318386, "acc_stderr": 0.029105220833224605, "acc_norm": 0.25112107623318386, "acc_norm_stderr": 0.029105220833224605 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909281, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909281 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.23737373737373738, "acc_stderr": 0.03031371053819888, "acc_norm": 0.23737373737373738, "acc_norm_stderr": 0.03031371053819888 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2206896551724138, "acc_stderr": 0.034559302019248124, "acc_norm": 0.2206896551724138, "acc_norm_stderr": 0.034559302019248124 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.13725490196078433, "acc_stderr": 0.03424084669891521, "acc_norm": 0.13725490196078433, "acc_norm_stderr": 0.03424084669891521 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.31512605042016806, "acc_stderr": 0.030176808288974337, "acc_norm": 0.31512605042016806, "acc_norm_stderr": 0.030176808288974337 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2794871794871795, "acc_stderr": 0.022752388839776826, "acc_norm": 0.2794871794871795, "acc_norm_stderr": 0.022752388839776826 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.13, "acc_stderr": 0.033799766898963086, "acc_norm": 0.13, "acc_norm_stderr": 0.033799766898963086 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03755265865037181, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03755265865037181 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114485, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114485 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.026450874489042767, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.026450874489042767 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.2564102564102564, "acc_stderr": 0.028605953702004264, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.028605953702004264 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.02512576648482784, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.02512576648482784 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.32727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.0449429086625209 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.2537313432835821, "acc_stderr": 0.030769444967296014, "acc_norm": 0.2537313432835821, "acc_norm_stderr": 0.030769444967296014 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.033450369167889904, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.033450369167889904 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.02210112878741543, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.02210112878741543 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.2361111111111111, "acc_stderr": 0.03551446610810826, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24566473988439305, "acc_stderr": 0.02317629820399201, "acc_norm": 0.24566473988439305, "acc_norm_stderr": 0.02317629820399201 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.25153374233128833, "acc_stderr": 0.03408997886857529, "acc_norm": 0.25153374233128833, "acc_norm_stderr": 0.03408997886857529 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.27469135802469136, "acc_stderr": 0.024836057868294677, "acc_norm": 0.27469135802469136, "acc_norm_stderr": 0.024836057868294677 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.27461139896373055, "acc_stderr": 0.03221024508041154, "acc_norm": 0.27461139896373055, "acc_norm_stderr": 0.03221024508041154 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.043391383225798594, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.043391383225798594 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23486238532110093, "acc_stderr": 0.018175110510343602, "acc_norm": 0.23486238532110093, "acc_norm_stderr": 0.018175110510343602 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.2777777777777778, "acc_stderr": 0.025646863097137904, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.025646863097137904 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.2066115702479339, "acc_stderr": 0.03695980128098824, "acc_norm": 0.2066115702479339, "acc_norm_stderr": 0.03695980128098824 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.25, "acc_stderr": 0.03523807393012047, "acc_norm": 0.25, "acc_norm_stderr": 0.03523807393012047 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.21895424836601307, "acc_stderr": 0.01672993756553755, "acc_norm": 0.21895424836601307, "acc_norm_stderr": 0.01672993756553755 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902002, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902002 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.15178571428571427, "acc_stderr": 0.034057028381856924, "acc_norm": 0.15178571428571427, "acc_norm_stderr": 0.034057028381856924 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.27314814814814814, "acc_stderr": 0.03038805130167812, "acc_norm": 0.27314814814814814, "acc_norm_stderr": 0.03038805130167812 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25027932960893856, "acc_stderr": 0.014487500852850417, "acc_norm": 0.25027932960893856, "acc_norm_stderr": 0.014487500852850417 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2647058823529412, "acc_stderr": 0.026799562024887685, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.026799562024887685 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.22448979591836735, "acc_stderr": 0.02671143055553839, "acc_norm": 0.22448979591836735, "acc_norm_stderr": 0.02671143055553839 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.23628691983122363, "acc_stderr": 0.027652153144159267, "acc_norm": 0.23628691983122363, "acc_norm_stderr": 0.027652153144159267 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.2653194263363755, "acc_stderr": 0.011276198843958873, "acc_norm": 0.2653194263363755, "acc_norm_stderr": 0.011276198843958873 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693268, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693268 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885415, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885415 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.24479804161566707, "mc1_stderr": 0.01505186948671501, "mc2": 0.41023662722679205, "mc2_stderr": 0.016160843398647234 },
-    "harness|ko_commongen_v2|2": { "acc": 0.38028169014084506, "acc_stderr": 0.016641217297503577, "acc_norm": 0.4460093896713615, "acc_norm_stderr": 0.017039561832563676 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "DopeorNope/KOAT-5.8b",
-    "model_sha": "768c40d2ffbddbc8aa15eed33234eef248eb43e7",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.27986348122866894, "acc_stderr": 0.013119040897725923, "acc_norm": 0.3455631399317406, "acc_norm_stderr": 0.013896938461145687 },
-    "harness|ko_hellaswag|10": { "acc": 0.36566421031667, "acc_stderr": 0.0048063163427093936, "acc_norm": 0.48466440948018324, "acc_norm_stderr": 0.004987433862274562 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.39766081871345027, "acc_stderr": 0.0375363895576169, "acc_norm": 0.39766081871345027, "acc_norm_stderr": 0.0375363895576169 },
-    "harness|ko_mmlu_management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.0462028408228004, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.0462028408228004 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.41890166028097064, "acc_stderr": 0.017643205052377185, "acc_norm": 0.41890166028097064, "acc_norm_stderr": 0.017643205052377185 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.362962962962963, "acc_stderr": 0.041539484047424004, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.041539484047424004 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.28936170212765955, "acc_stderr": 0.029644006577009618, "acc_norm": 0.28936170212765955, "acc_norm_stderr": 0.029644006577009618 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.3132530120481928, "acc_stderr": 0.036108050180310235, "acc_norm": 0.3132530120481928, "acc_norm_stderr": 0.036108050180310235 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.3858520900321543, "acc_stderr": 0.027648149599751464, "acc_norm": 0.3858520900321543, "acc_norm_stderr": 0.027648149599751464 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.03244305283008731 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3838383838383838, "acc_stderr": 0.03464881675016339, "acc_norm": 0.3838383838383838, "acc_norm_stderr": 0.03464881675016339 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4206896551724138, "acc_stderr": 0.0411391498118926, "acc_norm": 0.4206896551724138, "acc_norm_stderr": 0.0411391498118926 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617746, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617746 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3865546218487395, "acc_stderr": 0.0316314580755238, "acc_norm": 0.3865546218487395, "acc_norm_stderr": 0.0316314580755238 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3076923076923077, "acc_stderr": 0.02340092891831049, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.02340092891831049 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760628, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.04820403072760628 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.0307127300709826, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.0307127300709826 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3709677419354839, "acc_stderr": 0.02748054188795359, "acc_norm": 0.3709677419354839, "acc_norm_stderr": 0.02748054188795359 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.4829059829059829, "acc_stderr": 0.032736940493481824, "acc_norm": 0.4829059829059829, "acc_norm_stderr": 0.032736940493481824 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3471698113207547, "acc_stderr": 0.02930010170554965, "acc_norm": 0.3471698113207547, "acc_norm_stderr": 0.02930010170554965 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.33636363636363636, "acc_stderr": 0.04525393596302506, "acc_norm": 0.33636363636363636, "acc_norm_stderr": 0.04525393596302506 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095932, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095932 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.03710185726119996, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.03710185726119996 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.4228855721393035, "acc_stderr": 0.03493231777421282, "acc_norm": 0.4228855721393035, "acc_norm_stderr": 0.03493231777421282 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267437, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267437 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.0220190800122179, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.0220190800122179 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.38439306358381503, "acc_stderr": 0.026189666966272035, "acc_norm": 0.38439306358381503, "acc_norm_stderr": 0.026189666966272035 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.03623089915724145, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.03623089915724145 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.39197530864197533, "acc_stderr": 0.027163686038271226, "acc_norm": 0.39197530864197533, "acc_norm_stderr": 0.027163686038271226 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.38860103626943004, "acc_stderr": 0.03517739796373132, "acc_norm": 0.38860103626943004, "acc_norm_stderr": 0.03517739796373132 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512322, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512322 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3779816513761468, "acc_stderr": 0.02078918706672811, "acc_norm": 0.3779816513761468, "acc_norm_stderr": 0.02078918706672811 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604672, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604672 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.45098039215686275, "acc_stderr": 0.028491993586171566, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.028491993586171566 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.5041322314049587, "acc_stderr": 0.045641987674327526, "acc_norm": 0.5041322314049587, "acc_norm_stderr": 0.045641987674327526 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.32894736842105265, "acc_stderr": 0.03823428969926605, "acc_norm": 0.32894736842105265, "acc_norm_stderr": 0.03823428969926605 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3104575163398693, "acc_stderr": 0.018718067052623216, "acc_norm": 0.3104575163398693, "acc_norm_stderr": 0.018718067052623216 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.027889139300534778, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.027889139300534778 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.03236585252602157, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.03236585252602157 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24804469273743016, "acc_stderr": 0.014444157808261446, "acc_norm": 0.24804469273743016, "acc_norm_stderr": 0.014444157808261446 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4117647058823529, "acc_stderr": 0.029896163033125474, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.029896163033125474 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.3142857142857143, "acc_stderr": 0.029719329422417468, "acc_norm": 0.3142857142857143, "acc_norm_stderr": 0.029719329422417468 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.45147679324894513, "acc_stderr": 0.0323936001739747, "acc_norm": 0.45147679324894513, "acc_norm_stderr": 0.0323936001739747 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.3135593220338983, "acc_stderr": 0.01184923429145932, "acc_norm": 0.3135593220338983, "acc_norm_stderr": 0.01184923429145932 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.36764705882352944, "acc_stderr": 0.03384132045674118, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.03384132045674118 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3878787878787879, "acc_stderr": 0.038049136539710114, "acc_norm": 0.3878787878787879, "acc_norm_stderr": 0.038049136539710114 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456411, "mc2": 0.3826229918315052, "mc2_stderr": 0.015120737226444851 },
-    "harness|ko_commongen_v2|2": { "acc": 0.32981220657276994, "acc_stderr": 0.01611635552339568, "acc_norm": 0.3967136150234742, "acc_norm_stderr": 0.01677009546349846 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "DopeorNope/ZeroCoka-7B",
-    "model_sha": "3025135b08f7d052531fcd8f6a4a5a97e4e25c76",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": { "acc": 0.35238907849829354, "acc_stderr": 0.01396014260059869, "acc_norm": 0.3984641638225256, "acc_norm_stderr": 0.014306946052735569 },
-    "harness|ko_hellaswag|10": { "acc": 0.3679545907189803, "acc_stderr": 0.0048126332800782715, "acc_norm": 0.46932881896036643, "acc_norm_stderr": 0.004980384575535391 },
-    "harness|ko_mmlu_world_religions|5": { "acc": 0.47368421052631576, "acc_stderr": 0.038295098689947286, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.038295098689947286 },
-    "harness|ko_mmlu_management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 },
-    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.46871008939974457, "acc_stderr": 0.01784491809046854, "acc_norm": 0.46871008939974457, "acc_norm_stderr": 0.01784491809046854 },
-    "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 },
-    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 },
-    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788684, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788684 },
-    "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.037891344246115496, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.037891344246115496 },
-    "harness|ko_mmlu_philosophy|5": { "acc": 0.44694533762057875, "acc_stderr": 0.028237769422085335, "acc_norm": 0.44694533762057875, "acc_norm_stderr": 0.028237769422085335 },
-    "harness|ko_mmlu_human_aging|5": { "acc": 0.42152466367713004, "acc_stderr": 0.03314190222110656, "acc_norm": 0.42152466367713004, "acc_norm_stderr": 0.03314190222110656 },
-    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 },
-    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 },
-    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5404040404040404, "acc_stderr": 0.035507024651313425, "acc_norm": 0.5404040404040404, "acc_norm_stderr": 0.035507024651313425 },
-    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.0407032901370707, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.0407032901370707 },
-    "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 },
-    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.0322529423239964, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.0322529423239964 },
-    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.45897435897435895, "acc_stderr": 0.025265525491284295, "acc_norm": 0.45897435897435895, "acc_norm_stderr": 0.025265525491284295 },
-    "harness|ko_mmlu_computer_security|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 },
-    "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 },
-    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.04832853553437055 },
-    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.31527093596059114, "acc_stderr": 0.03269080871970186, "acc_norm": 0.31527093596059114, "acc_norm_stderr": 0.03269080871970186 },
-    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4483870967741935, "acc_stderr": 0.02829205683011273, "acc_norm": 0.4483870967741935, "acc_norm_stderr": 0.02829205683011273 },
-    "harness|ko_mmlu_marketing|5": { "acc": 0.6025641025641025, "acc_stderr": 0.03205953453789293, "acc_norm": 0.6025641025641025, "acc_norm_stderr": 0.03205953453789293 },
-    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.030635627957961823, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961823 },
-    "harness|ko_mmlu_public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 },
-    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524586, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524586 },
-    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.03879687024073327, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.03879687024073327 },
-    "harness|ko_mmlu_sociology|5": { "acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348 },
-    "harness|ko_mmlu_college_medicine|5": { "acc": 0.34104046242774566, "acc_stderr": 0.03614665424180826, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.03614665424180826 },
-    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.32275132275132273, "acc_stderr": 0.024078943243597016, "acc_norm": 0.32275132275132273, "acc_norm_stderr": 0.024078943243597016 },
-    "harness|ko_mmlu_college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.04089465449325582, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325582 },
-    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
-    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 },
-    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5173410404624278, "acc_stderr": 0.026902900458666647, "acc_norm": 0.5173410404624278, "acc_norm_stderr": 0.026902900458666647 },
-    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.38650306748466257, "acc_stderr": 0.03825825548848607, "acc_norm": 0.38650306748466257, "acc_norm_stderr": 0.03825825548848607 },
-    "harness|ko_mmlu_prehistory|5": { "acc": 0.4382716049382716, "acc_stderr": 0.027607914087400473, "acc_norm": 0.4382716049382716, "acc_norm_stderr": 0.027607914087400473 },
-    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 },
-    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5025906735751295, "acc_stderr": 0.03608390745384487, "acc_norm": 0.5025906735751295, "acc_norm_stderr": 0.03608390745384487 },
-    "harness|ko_mmlu_econometrics|5": { "acc": 0.20175438596491227, "acc_stderr": 0.03775205013583639, "acc_norm": 0.20175438596491227, "acc_norm_stderr": 0.03775205013583639 },
-    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46972477064220186, "acc_stderr": 0.021397988604936965, "acc_norm": 0.46972477064220186, "acc_norm_stderr": 0.021397988604936965 },
-    "harness|ko_mmlu_formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377563, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377563 },
-    "harness|ko_mmlu_nutrition|5": { "acc": 0.46405228758169936, "acc_stderr": 0.028555827516528777, "acc_norm": 0.46405228758169936, "acc_norm_stderr": 0.028555827516528777 },
-    "harness|ko_mmlu_business_ethics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 },
-    "harness|ko_mmlu_international_law|5": { "acc": 0.5289256198347108, "acc_stderr": 0.04556710331269498, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.04556710331269498 },
-    "harness|ko_mmlu_astronomy|5": { "acc": 0.35526315789473684, "acc_stderr": 0.03894734487013317, "acc_norm": 0.35526315789473684, "acc_norm_stderr": 0.03894734487013317 },
-    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3284313725490196, "acc_stderr": 0.018999707383162666, "acc_norm": 0.3284313725490196, "acc_norm_stderr": 0.018999707383162666 },
-    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650147, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650147 },
-    "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 },
-    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.03256850570293648, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.03256850570293648 },
-    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2927374301675978, "acc_stderr": 0.015218109544410182, "acc_norm": 0.2927374301675978, "acc_norm_stderr": 0.015218109544410182 },
-    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 },
-    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 },
-    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.39705882352941174, "acc_stderr": 0.029722152099280065, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.029722152099280065 },
-    "harness|ko_mmlu_security_studies|5": { "acc": 0.5142857142857142, "acc_stderr": 0.03199615232806287, "acc_norm": 0.5142857142857142, "acc_norm_stderr": 0.03199615232806287 },
-    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5569620253164557, "acc_stderr": 0.03233532777533485, "acc_norm": 0.5569620253164557, "acc_norm_stderr": 0.03233532777533485 },
-    "harness|ko_mmlu_professional_law|5": { "acc": 0.3239895697522816, "acc_stderr": 0.011952840809646563, "acc_norm": 0.3239895697522816, "acc_norm_stderr": 0.011952840809646563 },
-    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4411764705882353, "acc_stderr": 0.03484941514429231, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.03484941514429231 },
-    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4909090909090909, "acc_stderr": 0.03903698647748441, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.03903698647748441 },
-    "harness|ko_truthfulqa_mc|0": { "mc1": 0.32068543451652387, "mc1_stderr": 0.0163391703732809, "mc2": 0.498111749136946, "mc2_stderr": 0.015897921630313217 },
-    "harness|ko_commongen_v2|2": { "acc": 0.318075117370892, "acc_stderr": 0.015964978456287866, "acc_norm": 0.32981220657276994, "acc_norm_stderr": 0.016116355523395676 }
-  },
-  "versions": {
-    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
-    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
-    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
-    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
-    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
-    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
-    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
-    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
-    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
-    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
-    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
-    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
-    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
-    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
-    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
-    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
-  },
-  "config_general": {
-    "model_name": "DopeorNope/Zero_COKE_K-13B",
-    "model_sha": "fda4838dd7feb06c1289ae143810c67a59a72961",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
|
EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json
DELETED
@@ -1,444 +0,0 @@
Deleted lighteval result JSON (444 lines); its contents:
  config_general: model_name "EleutherAI/polyglot-ko-1.3b", model_sha "557e162cf6e944fdbae05bab2e45d066a125eacb", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null
  versions: 1 for every harness|ko_mmlu_*|5 subtask and for harness|ko_commongen_v2|2; 0 for "all", harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and harness|ko_truthfulqa_mc|0
  results (acc with acc_stderr in parentheses; every ko_mmlu subtask reports acc_norm and acc_norm_stderr identical to its acc and acc_stderr):
    harness|ko_arc_challenge|25: acc 0.2235494880546075 (0.012174896631202605), acc_norm 0.2815699658703072 (0.013143376735009015)
    harness|ko_hellaswag|10: acc 0.3345947022505477 (0.004708842600177431), acc_norm 0.4135630352519418 (0.0049146550633294974)
    harness|ko_mmlu_<subtask>|5:
      world_religions 0.27485380116959063 (0.03424042924691585), management 0.27184466019417475 (0.044052680241409216), miscellaneous 0.26947637292464877 (0.015866243073215065),
      anatomy 0.26666666666666666 (0.038201699145179055), abstract_algebra 0.3 (0.046056618647183814), conceptual_physics 0.2127659574468085 (0.026754391348039783),
      virology 0.24096385542168675 (0.033293941190735296), philosophy 0.2379421221864952 (0.024185150647818707), human_aging 0.2825112107623318 (0.030216831011508766),
      human_sexuality 0.21374045801526717 (0.0359546161177469), medical_genetics 0.24 (0.042923469599092816), high_school_geography 0.2474747474747475 (0.03074630074212451),
      electrical_engineering 0.22758620689655173 (0.03493950380131184), college_physics 0.22549019607843138 (0.041583075330832865), high_school_microeconomics 0.31512605042016806 (0.030176808288974337),
      high_school_macroeconomics 0.2205128205128205 (0.02102067268082791), computer_security 0.18 (0.038612291966536955), global_facts 0.31 (0.04648231987117316),
      jurisprudence 0.25 (0.04186091791394607), high_school_chemistry 0.2660098522167488 (0.03108982600293752), high_school_biology 0.3 (0.02606936229533513),
      marketing 0.23076923076923078 (0.027601921381417607), clinical_knowledge 0.25660377358490566 (0.026880647889051968), public_relations 0.2545454545454545 (0.04172343038705383),
      high_school_mathematics 0.2962962962962963 (0.02784081149587194), high_school_physics 0.304635761589404 (0.03757949922943342), sociology 0.25870646766169153 (0.03096590312357303),
      college_medicine 0.2254335260115607 (0.03186209851641144), elementary_mathematics 0.2566137566137566 (0.022494510767503154), college_biology 0.2638888888888889 (0.03685651095897532),
      college_chemistry 0.23 (0.04229525846816505), us_foreign_policy 0.22 (0.04163331998932269), moral_disputes 0.24855491329479767 (0.023267528432100174),
      logical_fallacies 0.31901840490797545 (0.03661997551073836), prehistory 0.2623456790123457 (0.024477222856135114), college_mathematics 0.25 (0.04351941398892446),
      high_school_government_and_politics 0.33678756476683935 (0.03410780251836184), econometrics 0.20175438596491227 (0.037752050135836386), high_school_psychology 0.24220183486238533 (0.01836817630659862),
      formal_logic 0.23015873015873015 (0.03764950879790606), nutrition 0.23529411764705882 (0.024288619466046102), business_ethics 0.18 (0.03861229196653695),
      international_law 0.256198347107438 (0.039849796533028704), astronomy 0.21710526315789475 (0.033550453048829226), professional_psychology 0.24019607843137256 (0.01728276069516743),
      professional_accounting 0.2553191489361702 (0.02601199293090201), machine_learning 0.21428571428571427 (0.03894641120044793), high_school_statistics 0.46296296296296297 (0.03400603625538272),
      moral_scenarios 0.24692737430167597 (0.014422292204808852), college_computer_science 0.25 (0.04351941398892446), high_school_computer_science 0.3 (0.046056618647183814),
      professional_medicine 0.4411764705882353 (0.030161911930767102), security_studies 0.3795918367346939 (0.03106721126287249), high_school_world_history 0.2109704641350211 (0.02655837250266192),
      professional_law 0.23468057366362452 (0.010824026872449344), high_school_us_history 0.25 (0.03039153369274154), high_school_european_history 0.22424242424242424 (0.03256866661681102)
    harness|ko_truthfulqa_mc|0: mc1 0.25091799265605874 (0.015176985027707682), mc2 0.4116568832959107 (0.015044504977529799)
    harness|ko_commongen_v2|2: acc 0.12206572769953052 (0.011221814716156896), acc_norm 0.16901408450704225 (0.01284675672446505)
EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json
DELETED
@@ -1,444 +0,0 @@
Deleted lighteval result JSON (444 lines); its contents:
  config_general: model_name "EleutherAI/polyglot-ko-12.8b", model_sha "09dfc839067bf44e7f52976eca8adbc17f04e1b0", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null
  versions: 1 for every harness|ko_mmlu_*|5 subtask and for harness|ko_commongen_v2|2; 0 for "all", harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and harness|ko_truthfulqa_mc|0
  results (acc with acc_stderr in parentheses; every ko_mmlu subtask reports acc_norm and acc_norm_stderr identical to its acc and acc_stderr):
    harness|ko_arc_challenge|25: acc 0.2858361774744027 (0.013203196088537365), acc_norm 0.33532423208191126 (0.013796182947785562)
    harness|ko_hellaswag|10: acc 0.385381398127863 (0.004856906473719383), acc_norm 0.5027882891854212 (0.004989703824167094)
    harness|ko_mmlu_<subtask>|5:
      world_religions 0.30994152046783624 (0.03546976959393161), management 0.18446601941747573 (0.03840423627288276), miscellaneous 0.25925925925925924 (0.015671006009339572),
      anatomy 0.22962962962962963 (0.036333844140734636), abstract_algebra 0.27 (0.0446196043338474), conceptual_physics 0.2127659574468085 (0.026754391348039787),
      virology 0.21686746987951808 (0.03208284450356365), philosophy 0.31189710610932475 (0.02631185807185416), human_aging 0.20179372197309417 (0.02693611191280227),
      human_sexuality 0.22900763358778625 (0.036853466317118506), medical_genetics 0.23 (0.04229525846816506), high_school_geography 0.24242424242424243 (0.03053289223393203),
      electrical_engineering 0.2896551724137931 (0.03780019230438014), college_physics 0.3137254901960784 (0.04617034827006716), high_school_microeconomics 0.23109243697478993 (0.027381406927868963),
      high_school_macroeconomics 0.21025641025641026 (0.020660597485026928), computer_security 0.26 (0.0440844002276808), global_facts 0.21 (0.040936018074033256),
      jurisprudence 0.25 (0.04186091791394607), high_school_chemistry 0.26108374384236455 (0.0309037969521145), high_school_biology 0.25161290322580643 (0.024685979286239963),
      marketing 0.23076923076923078 (0.027601921381417604), clinical_knowledge 0.23773584905660378 (0.026199808807561932), public_relations 0.21818181818181817 (0.03955932861795833),
      high_school_mathematics 0.26296296296296295 (0.02684205787383371), high_school_physics 0.2847682119205298 (0.03684881521389024), sociology 0.263681592039801 (0.03115715086935554),
      college_medicine 0.24855491329479767 (0.03295304696818317), elementary_mathematics 0.2671957671957672 (0.022789673145776578), college_biology 0.2569444444444444 (0.036539469694421),
      college_chemistry 0.2 (0.04020151261036846), us_foreign_policy 0.26 (0.04408440022768079), moral_disputes 0.23121387283236994 (0.022698657167855716),
      logical_fallacies 0.2883435582822086 (0.035590395316173425), prehistory 0.2777777777777778 (0.024922001168886338), college_mathematics 0.33 (0.047258156262526045),
      high_school_government_and_politics 0.2694300518134715 (0.03201867122877794), econometrics 0.23684210526315788 (0.03999423879281336), high_school_psychology 0.26422018348623855 (0.0189041641715102),
      formal_logic 0.19047619047619047 (0.035122074123020534), nutrition 0.2581699346405229 (0.02505850331695815), business_ethics 0.22 (0.041633319989322674),
      international_law 0.36363636363636365 (0.043913262867240704), astronomy 0.28289473684210525 (0.03665349695640767), professional_psychology 0.20098039215686275 (0.016211938889655574),
      professional_accounting 0.22695035460992907 (0.02498710636564298), machine_learning 0.25 (0.04109974682633932), high_school_statistics 0.3611111111111111 (0.03275773486100999),
      moral_scenarios 0.24692737430167597 (0.014422292204808852), college_computer_science 0.27 (0.04461960433384741), high_school_computer_science 0.31 (0.04648231987117316),
      professional_medicine 0.44485294117647056 (0.030187532060329383), security_studies 0.22857142857142856 (0.026882144922307748), high_school_world_history 0.32489451476793246 (0.030486039389105303),
      professional_law 0.25684485006518903 (0.011158455853098857), high_school_us_history 0.27941176470588236 (0.031493281045079556), high_school_european_history 0.23030303030303031 (0.032876667586034886)
    harness|ko_truthfulqa_mc|0: mc1 0.2350061199510404 (0.014843061507731613), mc2 0.390667104295536 (0.014736649975849761)
    harness|ko_commongen_v2|2: acc 0.49413145539906106 (0.017138598632436254), acc_norm 0.5868544600938967 (0.016879203885533163)
EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json
DELETED
@@ -1,444 +0,0 @@
Deleted lighteval result JSON (444 lines); its contents:
  config_general: model_name "EleutherAI/polyglot-ko-3.8b", model_sha "3c696a71c16b4a4622b7cabf6c5da4ba5a73b548", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null
  versions: 1 for every harness|ko_mmlu_*|5 subtask and for harness|ko_commongen_v2|2; 0 for "all", harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and harness|ko_truthfulqa_mc|0
  results (acc with acc_stderr in parentheses; every ko_mmlu subtask reports acc_norm and acc_norm_stderr identical to its acc and acc_stderr):
    harness|ko_arc_challenge|25: acc 0.2525597269624573 (0.01269672898020771), acc_norm 0.3046075085324232 (0.013449522109932494)
    harness|ko_hellaswag|10: acc 0.3511252738498307 (0.004763465139038552), acc_norm 0.4420434176458873 (0.004956147046108961)
    harness|ko_mmlu_<subtask>|5:
      world_religions 0.2222222222222222 (0.03188578017686398), management 0.30097087378640774 (0.045416094465039476), miscellaneous 0.23627075351213284 (0.015190473717037497),
      anatomy 0.23703703703703705 (0.03673731683969506), abstract_algebra 0.27 (0.04461960433384741), conceptual_physics 0.2 (0.026148818018424502),
      virology 0.25301204819277107 (0.03384429155233135), philosophy 0.24437299035369775 (0.024406162094668886), human_aging 0.21524663677130046 (0.027584066602208263),
      human_sexuality 0.19083969465648856 (0.034465133507525954), medical_genetics 0.21 (0.040936018074033256), high_school_geography 0.31313131313131315 (0.033042050878136525),
      electrical_engineering 0.2413793103448276 (0.03565998174135302), college_physics 0.19607843137254902 (0.03950581861179962), high_school_microeconomics 0.3487394957983193 (0.030956636328566548),
      high_school_macroeconomics 0.3564102564102564 (0.024283140529467295), computer_security 0.16 (0.03684529491774709), global_facts 0.32 (0.04688261722621504),
      jurisprudence 0.21296296296296297 (0.03957835471980981), high_school_chemistry 0.29064039408866993 (0.03194740072265541), high_school_biology 0.3032258064516129 (0.026148685930671742),
      marketing 0.19658119658119658 (0.02603538609895129), clinical_knowledge 0.3283018867924528 (0.02890159361241178), public_relations 0.23636363636363636 (0.04069306319721376),
      high_school_mathematics 0.25925925925925924 (0.02671924078371216), high_school_physics 0.32450331125827814 (0.03822746937658753), sociology 0.26865671641791045 (0.03134328358208954),
      college_medicine 0.32947976878612717 (0.03583901754736412), elementary_mathematics 0.24603174603174602 (0.022182037202948368), college_biology 0.2569444444444444 (0.03653946969442099),
      college_chemistry 0.4 (0.049236596391733084), us_foreign_policy 0.26 (0.044084400227680794), moral_disputes 0.2398843930635838 (0.022989592543123567),
      logical_fallacies 0.3312883435582822 (0.03697983910025588), prehistory 0.26851851851851855 (0.024659685185967277), college_mathematics 0.35 (0.0479372485441102),
      high_school_government_and_politics 0.3626943005181347 (0.034697137917043715), econometrics 0.22807017543859648 (0.03947152782669415), high_school_psychology 0.25688073394495414 (0.018732492928342448),
      formal_logic 0.3492063492063492 (0.04263906892795132), nutrition 0.2549019607843137 (0.024954184324879905), business_ethics 0.26 (0.04408440022768079),
      international_law 0.1652892561983471 (0.03390780612972776), astronomy 0.29605263157894735 (0.03715062154998904), professional_psychology 0.21895424836601307 (0.016729937565537537),
      professional_accounting 0.24822695035460993 (0.025770015644290396), machine_learning 0.15178571428571427 (0.03405702838185692), high_school_statistics 0.4722222222222222 (0.0340470532865388),
      moral_scenarios 0.26145251396648045 (0.014696599650364546), college_computer_science 0.26 (0.04408440022768078), high_school_computer_science 0.22 (0.04163331998932269),
      professional_medicine 0.44485294117647056 (0.030187532060329383), security_studies 0.39591836734693875 (0.03130802899065685), high_school_world_history 0.23628691983122363 (0.02765215314415926),
      professional_law 0.24445893089960888 (0.010976425013113912), high_school_us_history 0.2647058823529412 (0.03096451792692341), high_school_european_history 0.296969696969697 (0.03567969772268046)
    harness|ko_truthfulqa_mc|0: mc1 0.24969400244798043 (0.015152286907148125), mc2 0.40454723614569765 (0.014981033793701278)
    harness|ko_commongen_v2|2: acc 0.05046948356807512 (0.007504195050541823), acc_norm 0.09507042253521127 (0.010054612173655424)
EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json
DELETED
@@ -1,444 +0,0 @@
-   … (444 deleted JSON lines: result blocks with acc / acc_stderr / acc_norm / acc_norm_stderr for "harness|ko_arc_challenge|25", "harness|ko_hellaswag|10" and the 57 "harness|ko_mmlu_*|5" subtasks, the "harness|ko_truthfulqa_mc|0" block (mc1 / mc1_stderr / mc2 / mc2_stderr), the "harness|ko_commongen_v2|2" block, the "versions" map for all tasks, and the "config_general" block: model_name "EleutherAI/polyglot-ko-5.8b", model_sha "581a4c3eebfac23536b3c9676bcfb05c6a97baa2", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null)
FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json
DELETED
@@ -1,444 +0,0 @@
-   … (444 deleted JSON lines: result blocks with acc / acc_stderr / acc_norm / acc_norm_stderr for "harness|ko_arc_challenge|25", "harness|ko_hellaswag|10" and the 57 "harness|ko_mmlu_*|5" subtasks, the "harness|ko_truthfulqa_mc|0" block (mc1 / mc1_stderr / mc2 / mc2_stderr), the "harness|ko_commongen_v2|2" block, the "versions" map for all tasks, and the "config_general" block: model_name "FINDA-FIT/llama-2-ko-plain", model_sha "091fe3550bfa49baaebda838c10825484580f89d", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null)
FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json
DELETED
@@ -1,444 +0,0 @@
-   … (deleted JSON lines: result blocks with acc / acc_stderr / acc_norm / acc_norm_stderr for "harness|ko_arc_challenge|25", "harness|ko_hellaswag|10" and the "harness|ko_mmlu_*|5" subtasks, the "harness|ko_truthfulqa_mc|0" block (mc1 / mc1_stderr / mc2 / mc2_stderr), the "harness|ko_commongen_v2|2" block, and the opening entries of the "versions" map)
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "FINDA-FIT/llama-ko-7b",
|
436 |
-
"model_sha": "c1f0b9f20d38c9494e1607bd30ce43da570d9d52",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
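Each of the removed result files above follows the same lighteval-style schema: a "results" map keyed by harness task (acc, acc_stderr, acc_norm, acc_norm_stderr, or mc1/mc2 for ko_truthfulqa), a "versions" map, and a "config_general" block with the model name and evaluation settings. As a minimal sketch of how one such file could be summarized (not part of this commit or repository; "result.json" is a placeholder path), in Python:

```python
import json
from statistics import mean

# Hypothetical local copy of one of the removed result files.
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Macro-average acc_norm over the ko_mmlu subtasks.
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|ko_mmlu_")]

print("model:", data["config_general"]["model_name"])
print("ko_mmlu acc_norm (macro avg):", round(mean(mmlu), 4))
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])
```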
FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.19539249146757678,
|
5 |
-
"acc_stderr": 0.01158690718995291,
|
6 |
-
"acc_norm": 0.2619453924914676,
|
7 |
-
"acc_norm_stderr": 0.012849054826858112
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.2642899820752838,
|
11 |
-
"acc_stderr": 0.00440053218855021,
|
12 |
-
"acc_norm": 0.27763393746265685,
|
13 |
-
"acc_norm_stderr": 0.00446916572860033
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.21052631578947367,
|
17 |
-
"acc_stderr": 0.0312678171466318,
|
18 |
-
"acc_norm": 0.21052631578947367,
|
19 |
-
"acc_norm_stderr": 0.0312678171466318
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.3106796116504854,
|
23 |
-
"acc_stderr": 0.04582124160161549,
|
24 |
-
"acc_norm": 0.3106796116504854,
|
25 |
-
"acc_norm_stderr": 0.04582124160161549
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.2681992337164751,
|
29 |
-
"acc_stderr": 0.015842430835269438,
|
30 |
-
"acc_norm": 0.2681992337164751,
|
31 |
-
"acc_norm_stderr": 0.015842430835269438
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.2074074074074074,
|
35 |
-
"acc_stderr": 0.03502553170678316,
|
36 |
-
"acc_norm": 0.2074074074074074,
|
37 |
-
"acc_norm_stderr": 0.03502553170678316
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.29,
|
41 |
-
"acc_stderr": 0.045604802157206845,
|
42 |
-
"acc_norm": 0.29,
|
43 |
-
"acc_norm_stderr": 0.045604802157206845
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2553191489361702,
|
47 |
-
"acc_stderr": 0.028504856470514203,
|
48 |
-
"acc_norm": 0.2553191489361702,
|
49 |
-
"acc_norm_stderr": 0.028504856470514203
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.30120481927710846,
|
53 |
-
"acc_stderr": 0.0357160923005348,
|
54 |
-
"acc_norm": 0.30120481927710846,
|
55 |
-
"acc_norm_stderr": 0.0357160923005348
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2797427652733119,
|
59 |
-
"acc_stderr": 0.02549425935069491,
|
60 |
-
"acc_norm": 0.2797427652733119,
|
61 |
-
"acc_norm_stderr": 0.02549425935069491
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.23318385650224216,
|
65 |
-
"acc_stderr": 0.028380391147094716,
|
66 |
-
"acc_norm": 0.23318385650224216,
|
67 |
-
"acc_norm_stderr": 0.028380391147094716
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2366412213740458,
|
71 |
-
"acc_stderr": 0.037276735755969195,
|
72 |
-
"acc_norm": 0.2366412213740458,
|
73 |
-
"acc_norm_stderr": 0.037276735755969195
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.29,
|
77 |
-
"acc_stderr": 0.045604802157206845,
|
78 |
-
"acc_norm": 0.29,
|
79 |
-
"acc_norm_stderr": 0.045604802157206845
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.24242424242424243,
|
83 |
-
"acc_stderr": 0.030532892233932032,
|
84 |
-
"acc_norm": 0.24242424242424243,
|
85 |
-
"acc_norm_stderr": 0.030532892233932032
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2413793103448276,
|
89 |
-
"acc_stderr": 0.03565998174135302,
|
90 |
-
"acc_norm": 0.2413793103448276,
|
91 |
-
"acc_norm_stderr": 0.03565998174135302
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.17647058823529413,
|
95 |
-
"acc_stderr": 0.03793281185307811,
|
96 |
-
"acc_norm": 0.17647058823529413,
|
97 |
-
"acc_norm_stderr": 0.03793281185307811
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3487394957983193,
|
101 |
-
"acc_stderr": 0.030956636328566545,
|
102 |
-
"acc_norm": 0.3487394957983193,
|
103 |
-
"acc_norm_stderr": 0.030956636328566545
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.3282051282051282,
|
107 |
-
"acc_stderr": 0.02380763319865727,
|
108 |
-
"acc_norm": 0.3282051282051282,
|
109 |
-
"acc_norm_stderr": 0.02380763319865727
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.22,
|
113 |
-
"acc_stderr": 0.0416333199893227,
|
114 |
-
"acc_norm": 0.22,
|
115 |
-
"acc_norm_stderr": 0.0416333199893227
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.15,
|
119 |
-
"acc_stderr": 0.03588702812826371,
|
120 |
-
"acc_norm": 0.15,
|
121 |
-
"acc_norm_stderr": 0.03588702812826371
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.2222222222222222,
|
125 |
-
"acc_stderr": 0.040191074725573483,
|
126 |
-
"acc_norm": 0.2222222222222222,
|
127 |
-
"acc_norm_stderr": 0.040191074725573483
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.3103448275862069,
|
131 |
-
"acc_stderr": 0.03255086769970103,
|
132 |
-
"acc_norm": 0.3103448275862069,
|
133 |
-
"acc_norm_stderr": 0.03255086769970103
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3064516129032258,
|
137 |
-
"acc_stderr": 0.026226485652553873,
|
138 |
-
"acc_norm": 0.3064516129032258,
|
139 |
-
"acc_norm_stderr": 0.026226485652553873
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.19658119658119658,
|
143 |
-
"acc_stderr": 0.02603538609895129,
|
144 |
-
"acc_norm": 0.19658119658119658,
|
145 |
-
"acc_norm_stderr": 0.02603538609895129
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.23018867924528302,
|
149 |
-
"acc_stderr": 0.025907897122408173,
|
150 |
-
"acc_norm": 0.23018867924528302,
|
151 |
-
"acc_norm_stderr": 0.025907897122408173
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.20909090909090908,
|
155 |
-
"acc_stderr": 0.03895091015724138,
|
156 |
-
"acc_norm": 0.20909090909090908,
|
157 |
-
"acc_norm_stderr": 0.03895091015724138
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.25925925925925924,
|
161 |
-
"acc_stderr": 0.02671924078371217,
|
162 |
-
"acc_norm": 0.25925925925925924,
|
163 |
-
"acc_norm_stderr": 0.02671924078371217
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.32450331125827814,
|
167 |
-
"acc_stderr": 0.03822746937658754,
|
168 |
-
"acc_norm": 0.32450331125827814,
|
169 |
-
"acc_norm_stderr": 0.03822746937658754
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.22388059701492538,
|
173 |
-
"acc_stderr": 0.0294752502360172,
|
174 |
-
"acc_norm": 0.22388059701492538,
|
175 |
-
"acc_norm_stderr": 0.0294752502360172
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.2138728323699422,
|
179 |
-
"acc_stderr": 0.03126511206173043,
|
180 |
-
"acc_norm": 0.2138728323699422,
|
181 |
-
"acc_norm_stderr": 0.03126511206173043
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2566137566137566,
|
185 |
-
"acc_stderr": 0.022494510767503154,
|
186 |
-
"acc_norm": 0.2566137566137566,
|
187 |
-
"acc_norm_stderr": 0.022494510767503154
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.22916666666666666,
|
191 |
-
"acc_stderr": 0.03514697467862388,
|
192 |
-
"acc_norm": 0.22916666666666666,
|
193 |
-
"acc_norm_stderr": 0.03514697467862388
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.33,
|
197 |
-
"acc_stderr": 0.047258156262526045,
|
198 |
-
"acc_norm": 0.33,
|
199 |
-
"acc_norm_stderr": 0.047258156262526045
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.25,
|
203 |
-
"acc_stderr": 0.04351941398892446,
|
204 |
-
"acc_norm": 0.25,
|
205 |
-
"acc_norm_stderr": 0.04351941398892446
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.23699421965317918,
|
209 |
-
"acc_stderr": 0.022894082489925992,
|
210 |
-
"acc_norm": 0.23699421965317918,
|
211 |
-
"acc_norm_stderr": 0.022894082489925992
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.27607361963190186,
|
215 |
-
"acc_stderr": 0.0351238528370505,
|
216 |
-
"acc_norm": 0.27607361963190186,
|
217 |
-
"acc_norm_stderr": 0.0351238528370505
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.25925925925925924,
|
221 |
-
"acc_stderr": 0.02438366553103545,
|
222 |
-
"acc_norm": 0.25925925925925924,
|
223 |
-
"acc_norm_stderr": 0.02438366553103545
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.34,
|
227 |
-
"acc_stderr": 0.04760952285695236,
|
228 |
-
"acc_norm": 0.34,
|
229 |
-
"acc_norm_stderr": 0.04760952285695236
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.27979274611398963,
|
233 |
-
"acc_stderr": 0.032396370467357015,
|
234 |
-
"acc_norm": 0.27979274611398963,
|
235 |
-
"acc_norm_stderr": 0.032396370467357015
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2631578947368421,
|
239 |
-
"acc_stderr": 0.04142439719489362,
|
240 |
-
"acc_norm": 0.2631578947368421,
|
241 |
-
"acc_norm_stderr": 0.04142439719489362
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.24036697247706423,
|
245 |
-
"acc_stderr": 0.01832060732096407,
|
246 |
-
"acc_norm": 0.24036697247706423,
|
247 |
-
"acc_norm_stderr": 0.01832060732096407
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.15873015873015872,
|
251 |
-
"acc_stderr": 0.032684540130117436,
|
252 |
-
"acc_norm": 0.15873015873015872,
|
253 |
-
"acc_norm_stderr": 0.032684540130117436
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.25163398692810457,
|
257 |
-
"acc_stderr": 0.0248480182638752,
|
258 |
-
"acc_norm": 0.25163398692810457,
|
259 |
-
"acc_norm_stderr": 0.0248480182638752
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.23,
|
263 |
-
"acc_stderr": 0.04229525846816508,
|
264 |
-
"acc_norm": 0.23,
|
265 |
-
"acc_norm_stderr": 0.04229525846816508
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.2892561983471074,
|
269 |
-
"acc_stderr": 0.04139112727635464,
|
270 |
-
"acc_norm": 0.2892561983471074,
|
271 |
-
"acc_norm_stderr": 0.04139112727635464
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.25,
|
275 |
-
"acc_stderr": 0.03523807393012047,
|
276 |
-
"acc_norm": 0.25,
|
277 |
-
"acc_norm_stderr": 0.03523807393012047
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.2238562091503268,
|
281 |
-
"acc_stderr": 0.016863008585416617,
|
282 |
-
"acc_norm": 0.2238562091503268,
|
283 |
-
"acc_norm_stderr": 0.016863008585416617
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.24822695035460993,
|
287 |
-
"acc_stderr": 0.025770015644290396,
|
288 |
-
"acc_norm": 0.24822695035460993,
|
289 |
-
"acc_norm_stderr": 0.025770015644290396
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25,
|
293 |
-
"acc_stderr": 0.04109974682633932,
|
294 |
-
"acc_norm": 0.25,
|
295 |
-
"acc_norm_stderr": 0.04109974682633932
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4166666666666667,
|
299 |
-
"acc_stderr": 0.03362277436608043,
|
300 |
-
"acc_norm": 0.4166666666666667,
|
301 |
-
"acc_norm_stderr": 0.03362277436608043
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.25139664804469275,
|
305 |
-
"acc_stderr": 0.014508979453553983,
|
306 |
-
"acc_norm": 0.25139664804469275,
|
307 |
-
"acc_norm_stderr": 0.014508979453553983
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.31,
|
311 |
-
"acc_stderr": 0.04648231987117316,
|
312 |
-
"acc_norm": 0.31,
|
313 |
-
"acc_norm_stderr": 0.04648231987117316
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.3,
|
317 |
-
"acc_stderr": 0.046056618647183814,
|
318 |
-
"acc_norm": 0.3,
|
319 |
-
"acc_norm_stderr": 0.046056618647183814
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.4411764705882353,
|
323 |
-
"acc_stderr": 0.030161911930767102,
|
324 |
-
"acc_norm": 0.4411764705882353,
|
325 |
-
"acc_norm_stderr": 0.030161911930767102
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.2612244897959184,
|
329 |
-
"acc_stderr": 0.02812342933514279,
|
330 |
-
"acc_norm": 0.2612244897959184,
|
331 |
-
"acc_norm_stderr": 0.02812342933514279
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.22362869198312235,
|
335 |
-
"acc_stderr": 0.027123298205229972,
|
336 |
-
"acc_norm": 0.22362869198312235,
|
337 |
-
"acc_norm_stderr": 0.027123298205229972
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2333767926988266,
|
341 |
-
"acc_stderr": 0.010803108481179088,
|
342 |
-
"acc_norm": 0.2333767926988266,
|
343 |
-
"acc_norm_stderr": 0.010803108481179088
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.2549019607843137,
|
347 |
-
"acc_stderr": 0.030587591351604243,
|
348 |
-
"acc_norm": 0.2549019607843137,
|
349 |
-
"acc_norm_stderr": 0.030587591351604243
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.23030303030303031,
|
353 |
-
"acc_stderr": 0.03287666758603489,
|
354 |
-
"acc_norm": 0.23030303030303031,
|
355 |
-
"acc_norm_stderr": 0.03287666758603489
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2741738066095471,
|
359 |
-
"mc1_stderr": 0.015616518497219385,
|
360 |
-
"mc2": 0.5382255654218452,
|
361 |
-
"mc2_stderr": 0.01636582464762524
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.11267605633802817,
|
365 |
-
"acc_stderr": 0.010839072955995904,
|
366 |
-
"acc_norm": 0.3615023474178404,
|
367 |
-
"acc_norm_stderr": 0.01646912149043007
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "FINDA-FIT/llama-m",
|
436 |
-
"model_sha": "7c06c7acb6bd18e1cf52846483e430def93686f2",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.3395904436860068,
|
5 |
-
"acc_stderr": 0.013839039762820169,
|
6 |
-
"acc_norm": 0.39590443686006827,
|
7 |
-
"acc_norm_stderr": 0.014291228393536588
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.38856801433977295,
|
11 |
-
"acc_stderr": 0.004864286176731823,
|
12 |
-
"acc_norm": 0.5073690499900418,
|
13 |
-
"acc_norm_stderr": 0.004989239462835233
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.391812865497076,
|
17 |
-
"acc_stderr": 0.037439798259263996,
|
18 |
-
"acc_norm": 0.391812865497076,
|
19 |
-
"acc_norm_stderr": 0.037439798259263996
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.27184466019417475,
|
23 |
-
"acc_stderr": 0.044052680241409216,
|
24 |
-
"acc_norm": 0.27184466019417475,
|
25 |
-
"acc_norm_stderr": 0.044052680241409216
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.3946360153256705,
|
29 |
-
"acc_stderr": 0.017478464305911545,
|
30 |
-
"acc_norm": 0.3946360153256705,
|
31 |
-
"acc_norm_stderr": 0.017478464305911545
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.35555555555555557,
|
35 |
-
"acc_stderr": 0.04135176749720386,
|
36 |
-
"acc_norm": 0.35555555555555557,
|
37 |
-
"acc_norm_stderr": 0.04135176749720386
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.34,
|
41 |
-
"acc_stderr": 0.04760952285695235,
|
42 |
-
"acc_norm": 0.34,
|
43 |
-
"acc_norm_stderr": 0.04760952285695235
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.26382978723404255,
|
47 |
-
"acc_stderr": 0.028809989854102956,
|
48 |
-
"acc_norm": 0.26382978723404255,
|
49 |
-
"acc_norm_stderr": 0.028809989854102956
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.30120481927710846,
|
53 |
-
"acc_stderr": 0.03571609230053481,
|
54 |
-
"acc_norm": 0.30120481927710846,
|
55 |
-
"acc_norm_stderr": 0.03571609230053481
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.4115755627009646,
|
59 |
-
"acc_stderr": 0.027950481494401266,
|
60 |
-
"acc_norm": 0.4115755627009646,
|
61 |
-
"acc_norm_stderr": 0.027950481494401266
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3632286995515695,
|
65 |
-
"acc_stderr": 0.032277904428505,
|
66 |
-
"acc_norm": 0.3632286995515695,
|
67 |
-
"acc_norm_stderr": 0.032277904428505
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.42748091603053434,
|
71 |
-
"acc_stderr": 0.043389203057924,
|
72 |
-
"acc_norm": 0.42748091603053434,
|
73 |
-
"acc_norm_stderr": 0.043389203057924
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.37,
|
77 |
-
"acc_stderr": 0.048523658709391,
|
78 |
-
"acc_norm": 0.37,
|
79 |
-
"acc_norm_stderr": 0.048523658709391
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.35353535353535354,
|
83 |
-
"acc_stderr": 0.03406086723547153,
|
84 |
-
"acc_norm": 0.35353535353535354,
|
85 |
-
"acc_norm_stderr": 0.03406086723547153
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.31724137931034485,
|
89 |
-
"acc_stderr": 0.03878352372138621,
|
90 |
-
"acc_norm": 0.31724137931034485,
|
91 |
-
"acc_norm_stderr": 0.03878352372138621
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.13725490196078433,
|
95 |
-
"acc_stderr": 0.03424084669891523,
|
96 |
-
"acc_norm": 0.13725490196078433,
|
97 |
-
"acc_norm_stderr": 0.03424084669891523
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.33613445378151263,
|
101 |
-
"acc_stderr": 0.030684737115135367,
|
102 |
-
"acc_norm": 0.33613445378151263,
|
103 |
-
"acc_norm_stderr": 0.030684737115135367
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.258974358974359,
|
107 |
-
"acc_stderr": 0.02221110681006167,
|
108 |
-
"acc_norm": 0.258974358974359,
|
109 |
-
"acc_norm_stderr": 0.02221110681006167
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.45,
|
113 |
-
"acc_stderr": 0.05,
|
114 |
-
"acc_norm": 0.45,
|
115 |
-
"acc_norm_stderr": 0.05
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.28,
|
119 |
-
"acc_stderr": 0.04512608598542128,
|
120 |
-
"acc_norm": 0.28,
|
121 |
-
"acc_norm_stderr": 0.04512608598542128
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.39814814814814814,
|
125 |
-
"acc_stderr": 0.04732332615978814,
|
126 |
-
"acc_norm": 0.39814814814814814,
|
127 |
-
"acc_norm_stderr": 0.04732332615978814
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.2561576354679803,
|
131 |
-
"acc_stderr": 0.0307127300709826,
|
132 |
-
"acc_norm": 0.2561576354679803,
|
133 |
-
"acc_norm_stderr": 0.0307127300709826
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3258064516129032,
|
137 |
-
"acc_stderr": 0.026662010578567104,
|
138 |
-
"acc_norm": 0.3258064516129032,
|
139 |
-
"acc_norm_stderr": 0.026662010578567104
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.5512820512820513,
|
143 |
-
"acc_stderr": 0.032583346493868806,
|
144 |
-
"acc_norm": 0.5512820512820513,
|
145 |
-
"acc_norm_stderr": 0.032583346493868806
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.35094339622641507,
|
149 |
-
"acc_stderr": 0.029373646253234686,
|
150 |
-
"acc_norm": 0.35094339622641507,
|
151 |
-
"acc_norm_stderr": 0.029373646253234686
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.39090909090909093,
|
155 |
-
"acc_stderr": 0.046737523336702384,
|
156 |
-
"acc_norm": 0.39090909090909093,
|
157 |
-
"acc_norm_stderr": 0.046737523336702384
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.26666666666666666,
|
161 |
-
"acc_stderr": 0.026962424325073828,
|
162 |
-
"acc_norm": 0.26666666666666666,
|
163 |
-
"acc_norm_stderr": 0.026962424325073828
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.1986754966887417,
|
167 |
-
"acc_stderr": 0.032578473844367746,
|
168 |
-
"acc_norm": 0.1986754966887417,
|
169 |
-
"acc_norm_stderr": 0.032578473844367746
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.4427860696517413,
|
173 |
-
"acc_stderr": 0.03512310964123936,
|
174 |
-
"acc_norm": 0.4427860696517413,
|
175 |
-
"acc_norm_stderr": 0.03512310964123936
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3236994219653179,
|
179 |
-
"acc_stderr": 0.0356760379963917,
|
180 |
-
"acc_norm": 0.3236994219653179,
|
181 |
-
"acc_norm_stderr": 0.0356760379963917
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2698412698412698,
|
185 |
-
"acc_stderr": 0.022860838309232072,
|
186 |
-
"acc_norm": 0.2698412698412698,
|
187 |
-
"acc_norm_stderr": 0.022860838309232072
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2361111111111111,
|
191 |
-
"acc_stderr": 0.03551446610810826,
|
192 |
-
"acc_norm": 0.2361111111111111,
|
193 |
-
"acc_norm_stderr": 0.03551446610810826
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.16,
|
197 |
-
"acc_stderr": 0.03684529491774709,
|
198 |
-
"acc_norm": 0.16,
|
199 |
-
"acc_norm_stderr": 0.03684529491774709
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.42,
|
203 |
-
"acc_stderr": 0.04960449637488583,
|
204 |
-
"acc_norm": 0.42,
|
205 |
-
"acc_norm_stderr": 0.04960449637488583
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.3901734104046243,
|
209 |
-
"acc_stderr": 0.026261677607806642,
|
210 |
-
"acc_norm": 0.3901734104046243,
|
211 |
-
"acc_norm_stderr": 0.026261677607806642
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.34355828220858897,
|
215 |
-
"acc_stderr": 0.03731133519673893,
|
216 |
-
"acc_norm": 0.34355828220858897,
|
217 |
-
"acc_norm_stderr": 0.03731133519673893
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.39197530864197533,
|
221 |
-
"acc_stderr": 0.02716368603827123,
|
222 |
-
"acc_norm": 0.39197530864197533,
|
223 |
-
"acc_norm_stderr": 0.02716368603827123
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.3,
|
227 |
-
"acc_stderr": 0.046056618647183814,
|
228 |
-
"acc_norm": 0.3,
|
229 |
-
"acc_norm_stderr": 0.046056618647183814
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.32642487046632124,
|
233 |
-
"acc_stderr": 0.033840286211432945,
|
234 |
-
"acc_norm": 0.32642487046632124,
|
235 |
-
"acc_norm_stderr": 0.033840286211432945
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2631578947368421,
|
239 |
-
"acc_stderr": 0.04142439719489361,
|
240 |
-
"acc_norm": 0.2631578947368421,
|
241 |
-
"acc_norm_stderr": 0.04142439719489361
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3743119266055046,
|
245 |
-
"acc_stderr": 0.02074895940898831,
|
246 |
-
"acc_norm": 0.3743119266055046,
|
247 |
-
"acc_norm_stderr": 0.02074895940898831
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.23015873015873015,
|
251 |
-
"acc_stderr": 0.03764950879790604,
|
252 |
-
"acc_norm": 0.23015873015873015,
|
253 |
-
"acc_norm_stderr": 0.03764950879790604
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.4215686274509804,
|
257 |
-
"acc_stderr": 0.028275490156791434,
|
258 |
-
"acc_norm": 0.4215686274509804,
|
259 |
-
"acc_norm_stderr": 0.028275490156791434
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.29,
|
263 |
-
"acc_stderr": 0.04560480215720684,
|
264 |
-
"acc_norm": 0.29,
|
265 |
-
"acc_norm_stderr": 0.04560480215720684
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5785123966942148,
|
269 |
-
"acc_stderr": 0.045077322787750874,
|
270 |
-
"acc_norm": 0.5785123966942148,
|
271 |
-
"acc_norm_stderr": 0.045077322787750874
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.40131578947368424,
|
275 |
-
"acc_stderr": 0.039889037033362836,
|
276 |
-
"acc_norm": 0.40131578947368424,
|
277 |
-
"acc_norm_stderr": 0.039889037033362836
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.369281045751634,
|
281 |
-
"acc_stderr": 0.019524316744866346,
|
282 |
-
"acc_norm": 0.369281045751634,
|
283 |
-
"acc_norm_stderr": 0.019524316744866346
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.30141843971631205,
|
287 |
-
"acc_stderr": 0.02737412888263115,
|
288 |
-
"acc_norm": 0.30141843971631205,
|
289 |
-
"acc_norm_stderr": 0.02737412888263115
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.2767857142857143,
|
293 |
-
"acc_stderr": 0.04246624336697624,
|
294 |
-
"acc_norm": 0.2767857142857143,
|
295 |
-
"acc_norm_stderr": 0.04246624336697624
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.27314814814814814,
|
299 |
-
"acc_stderr": 0.030388051301678116,
|
300 |
-
"acc_norm": 0.27314814814814814,
|
301 |
-
"acc_norm_stderr": 0.030388051301678116
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.2446927374301676,
|
305 |
-
"acc_stderr": 0.014378169884098424,
|
306 |
-
"acc_norm": 0.2446927374301676,
|
307 |
-
"acc_norm_stderr": 0.014378169884098424
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.31,
|
311 |
-
"acc_stderr": 0.04648231987117316,
|
312 |
-
"acc_norm": 0.31,
|
313 |
-
"acc_norm_stderr": 0.04648231987117316
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.3,
|
317 |
-
"acc_stderr": 0.046056618647183814,
|
318 |
-
"acc_norm": 0.3,
|
319 |
-
"acc_norm_stderr": 0.046056618647183814
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.31985294117647056,
|
323 |
-
"acc_stderr": 0.02833295951403124,
|
324 |
-
"acc_norm": 0.31985294117647056,
|
325 |
-
"acc_norm_stderr": 0.02833295951403124
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.37551020408163266,
|
329 |
-
"acc_stderr": 0.03100120903989484,
|
330 |
-
"acc_norm": 0.37551020408163266,
|
331 |
-
"acc_norm_stderr": 0.03100120903989484
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5232067510548524,
|
335 |
-
"acc_stderr": 0.032512152011410174,
|
336 |
-
"acc_norm": 0.5232067510548524,
|
337 |
-
"acc_norm_stderr": 0.032512152011410174
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2985658409387223,
|
341 |
-
"acc_stderr": 0.011688060141794208,
|
342 |
-
"acc_norm": 0.2985658409387223,
|
343 |
-
"acc_norm_stderr": 0.011688060141794208
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.38235294117647056,
|
347 |
-
"acc_stderr": 0.03410785338904719,
|
348 |
-
"acc_norm": 0.38235294117647056,
|
349 |
-
"acc_norm_stderr": 0.03410785338904719
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3939393939393939,
|
353 |
-
"acc_stderr": 0.0381549430868893,
|
354 |
-
"acc_norm": 0.3939393939393939,
|
355 |
-
"acc_norm_stderr": 0.0381549430868893
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24969400244798043,
|
359 |
-
"mc1_stderr": 0.015152286907148125,
|
360 |
-
"mc2": 0.38092210327853554,
|
361 |
-
"mc2_stderr": 0.014881931344043989
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.47417840375586856,
|
365 |
-
"acc_stderr": 0.017116907933735912,
|
366 |
-
"acc_norm": 0.5586854460093896,
|
367 |
-
"acc_norm_stderr": 0.017021311671847467
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "FINDA-FIT/llama-p",
|
436 |
-
"model_sha": "e54c345988c60cdafe797a2f15e916801ee4ab7b",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.20136518771331058,
|
5 |
-
"acc_stderr": 0.011718927477444262,
|
6 |
-
"acc_norm": 0.2636518771331058,
|
7 |
-
"acc_norm_stderr": 0.01287592915129705
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.2665803624775941,
|
11 |
-
"acc_stderr": 0.004412674170976469,
|
12 |
-
"acc_norm": 0.27922724556861184,
|
13 |
-
"acc_norm_stderr": 0.004477025762200596
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.2046783625730994,
|
17 |
-
"acc_stderr": 0.03094445977853321,
|
18 |
-
"acc_norm": 0.2046783625730994,
|
19 |
-
"acc_norm_stderr": 0.03094445977853321
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.30097087378640774,
|
23 |
-
"acc_stderr": 0.04541609446503949,
|
24 |
-
"acc_norm": 0.30097087378640774,
|
25 |
-
"acc_norm_stderr": 0.04541609446503949
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.2656449553001277,
|
29 |
-
"acc_stderr": 0.01579430248788873,
|
30 |
-
"acc_norm": 0.2656449553001277,
|
31 |
-
"acc_norm_stderr": 0.01579430248788873
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.2074074074074074,
|
35 |
-
"acc_stderr": 0.03502553170678316,
|
36 |
-
"acc_norm": 0.2074074074074074,
|
37 |
-
"acc_norm_stderr": 0.03502553170678316
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.3,
|
41 |
-
"acc_stderr": 0.046056618647183814,
|
42 |
-
"acc_norm": 0.3,
|
43 |
-
"acc_norm_stderr": 0.046056618647183814
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.24680851063829787,
|
47 |
-
"acc_stderr": 0.028185441301234113,
|
48 |
-
"acc_norm": 0.24680851063829787,
|
49 |
-
"acc_norm_stderr": 0.028185441301234113
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.2710843373493976,
|
53 |
-
"acc_stderr": 0.034605799075530255,
|
54 |
-
"acc_norm": 0.2710843373493976,
|
55 |
-
"acc_norm_stderr": 0.034605799075530255
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2958199356913183,
|
59 |
-
"acc_stderr": 0.02592237178881877,
|
60 |
-
"acc_norm": 0.2958199356913183,
|
61 |
-
"acc_norm_stderr": 0.02592237178881877
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.23766816143497757,
|
65 |
-
"acc_stderr": 0.028568079464714267,
|
66 |
-
"acc_norm": 0.23766816143497757,
|
67 |
-
"acc_norm_stderr": 0.028568079464714267
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.24427480916030533,
|
71 |
-
"acc_stderr": 0.037683359597287434,
|
72 |
-
"acc_norm": 0.24427480916030533,
|
73 |
-
"acc_norm_stderr": 0.037683359597287434
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.27,
|
77 |
-
"acc_stderr": 0.04461960433384739,
|
78 |
-
"acc_norm": 0.27,
|
79 |
-
"acc_norm_stderr": 0.04461960433384739
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.24242424242424243,
|
83 |
-
"acc_stderr": 0.030532892233932032,
|
84 |
-
"acc_norm": 0.24242424242424243,
|
85 |
-
"acc_norm_stderr": 0.030532892233932032
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2482758620689655,
|
89 |
-
"acc_stderr": 0.03600105692727771,
|
90 |
-
"acc_norm": 0.2482758620689655,
|
91 |
-
"acc_norm_stderr": 0.03600105692727771
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.16666666666666666,
|
95 |
-
"acc_stderr": 0.03708284662416542,
|
96 |
-
"acc_norm": 0.16666666666666666,
|
97 |
-
"acc_norm_stderr": 0.03708284662416542
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.33613445378151263,
|
101 |
-
"acc_stderr": 0.03068473711513536,
|
102 |
-
"acc_norm": 0.33613445378151263,
|
103 |
-
"acc_norm_stderr": 0.03068473711513536
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.3153846153846154,
|
107 |
-
"acc_stderr": 0.02355964698318994,
|
108 |
-
"acc_norm": 0.3153846153846154,
|
109 |
-
"acc_norm_stderr": 0.02355964698318994
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.24,
|
113 |
-
"acc_stderr": 0.04292346959909284,
|
114 |
-
"acc_norm": 0.24,
|
115 |
-
"acc_norm_stderr": 0.04292346959909284
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.16,
|
119 |
-
"acc_stderr": 0.03684529491774708,
|
120 |
-
"acc_norm": 0.16,
|
121 |
-
"acc_norm_stderr": 0.03684529491774708
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.2222222222222222,
|
125 |
-
"acc_stderr": 0.040191074725573483,
|
126 |
-
"acc_norm": 0.2222222222222222,
|
127 |
-
"acc_norm_stderr": 0.040191074725573483
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.3103448275862069,
|
131 |
-
"acc_stderr": 0.03255086769970103,
|
132 |
-
"acc_norm": 0.3103448275862069,
|
133 |
-
"acc_norm_stderr": 0.03255086769970103
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.31290322580645163,
|
137 |
-
"acc_stderr": 0.026377567028645858,
|
138 |
-
"acc_norm": 0.31290322580645163,
|
139 |
-
"acc_norm_stderr": 0.026377567028645858
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.19230769230769232,
|
143 |
-
"acc_stderr": 0.025819233256483727,
|
144 |
-
"acc_norm": 0.19230769230769232,
|
145 |
-
"acc_norm_stderr": 0.025819233256483727
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2339622641509434,
|
149 |
-
"acc_stderr": 0.02605529690115292,
|
150 |
-
"acc_norm": 0.2339622641509434,
|
151 |
-
"acc_norm_stderr": 0.02605529690115292
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.20909090909090908,
|
155 |
-
"acc_stderr": 0.03895091015724138,
|
156 |
-
"acc_norm": 0.20909090909090908,
|
157 |
-
"acc_norm_stderr": 0.03895091015724138
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.25925925925925924,
|
161 |
-
"acc_stderr": 0.026719240783712177,
|
162 |
-
"acc_norm": 0.25925925925925924,
|
163 |
-
"acc_norm_stderr": 0.026719240783712177
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.33112582781456956,
|
167 |
-
"acc_stderr": 0.038425817186598696,
|
168 |
-
"acc_norm": 0.33112582781456956,
|
169 |
-
"acc_norm_stderr": 0.038425817186598696
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.19402985074626866,
|
173 |
-
"acc_stderr": 0.027962677604768893,
|
174 |
-
"acc_norm": 0.19402985074626866,
|
175 |
-
"acc_norm_stderr": 0.027962677604768893
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.24277456647398843,
|
179 |
-
"acc_stderr": 0.0326926380614177,
|
180 |
-
"acc_norm": 0.24277456647398843,
|
181 |
-
"acc_norm_stderr": 0.0326926380614177
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2566137566137566,
|
185 |
-
"acc_stderr": 0.022494510767503154,
|
186 |
-
"acc_norm": 0.2566137566137566,
|
187 |
-
"acc_norm_stderr": 0.022494510767503154
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2708333333333333,
|
191 |
-
"acc_stderr": 0.03716177437566018,
|
192 |
-
"acc_norm": 0.2708333333333333,
|
193 |
-
"acc_norm_stderr": 0.03716177437566018
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.32,
|
197 |
-
"acc_stderr": 0.04688261722621504,
|
198 |
-
"acc_norm": 0.32,
|
199 |
-
"acc_norm_stderr": 0.04688261722621504
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.26,
|
203 |
-
"acc_stderr": 0.044084400227680794,
|
204 |
-
"acc_norm": 0.26,
|
205 |
-
"acc_norm_stderr": 0.044084400227680794
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.23410404624277456,
|
209 |
-
"acc_stderr": 0.022797110278071134,
|
210 |
-
"acc_norm": 0.23410404624277456,
|
211 |
-
"acc_norm_stderr": 0.022797110278071134
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.2822085889570552,
|
215 |
-
"acc_stderr": 0.03536117886664743,
|
216 |
-
"acc_norm": 0.2822085889570552,
|
217 |
-
"acc_norm_stderr": 0.03536117886664743
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.25925925925925924,
|
221 |
-
"acc_stderr": 0.02438366553103545,
|
222 |
-
"acc_norm": 0.25925925925925924,
|
223 |
-
"acc_norm_stderr": 0.02438366553103545
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.3,
|
227 |
-
"acc_stderr": 0.04605661864718381,
|
228 |
-
"acc_norm": 0.3,
|
229 |
-
"acc_norm_stderr": 0.04605661864718381
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.2849740932642487,
|
233 |
-
"acc_stderr": 0.0325771407770966,
|
234 |
-
"acc_norm": 0.2849740932642487,
|
235 |
-
"acc_norm_stderr": 0.0325771407770966
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2719298245614035,
|
239 |
-
"acc_stderr": 0.04185774424022056,
|
240 |
-
"acc_norm": 0.2719298245614035,
|
241 |
-
"acc_norm_stderr": 0.04185774424022056
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.23119266055045873,
|
245 |
-
"acc_stderr": 0.018075750241633163,
|
246 |
-
"acc_norm": 0.23119266055045873,
|
247 |
-
"acc_norm_stderr": 0.018075750241633163
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.16666666666666666,
|
251 |
-
"acc_stderr": 0.03333333333333337,
|
252 |
-
"acc_norm": 0.16666666666666666,
|
253 |
-
"acc_norm_stderr": 0.03333333333333337
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.2549019607843137,
|
257 |
-
"acc_stderr": 0.024954184324879912,
|
258 |
-
"acc_norm": 0.2549019607843137,
|
259 |
-
"acc_norm_stderr": 0.024954184324879912
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.27,
|
263 |
-
"acc_stderr": 0.0446196043338474,
|
264 |
-
"acc_norm": 0.27,
|
265 |
-
"acc_norm_stderr": 0.0446196043338474
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.34710743801652894,
|
269 |
-
"acc_stderr": 0.04345724570292534,
|
270 |
-
"acc_norm": 0.34710743801652894,
|
271 |
-
"acc_norm_stderr": 0.04345724570292534
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.26973684210526316,
|
275 |
-
"acc_stderr": 0.036117805602848975,
|
276 |
-
"acc_norm": 0.26973684210526316,
|
277 |
-
"acc_norm_stderr": 0.036117805602848975
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.22712418300653595,
|
281 |
-
"acc_stderr": 0.016949853279212373,
|
282 |
-
"acc_norm": 0.22712418300653595,
|
283 |
-
"acc_norm_stderr": 0.016949853279212373
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.24113475177304963,
|
287 |
-
"acc_stderr": 0.02551873104953777,
|
288 |
-
"acc_norm": 0.24113475177304963,
|
289 |
-
"acc_norm_stderr": 0.02551873104953777
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.23214285714285715,
|
293 |
-
"acc_stderr": 0.04007341809755806,
|
294 |
-
"acc_norm": 0.23214285714285715,
|
295 |
-
"acc_norm_stderr": 0.04007341809755806
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.4351851851851852,
|
299 |
-
"acc_stderr": 0.033812000056435254,
|
300 |
-
"acc_norm": 0.4351851851851852,
|
301 |
-
"acc_norm_stderr": 0.033812000056435254
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24916201117318434,
|
305 |
-
"acc_stderr": 0.01446589382985993,
|
306 |
-
"acc_norm": 0.24916201117318434,
|
307 |
-
"acc_norm_stderr": 0.01446589382985993
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.31,
|
311 |
-
"acc_stderr": 0.04648231987117316,
|
312 |
-
"acc_norm": 0.31,
|
313 |
-
"acc_norm_stderr": 0.04648231987117316
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.32,
|
317 |
-
"acc_stderr": 0.046882617226215034,
|
318 |
-
"acc_norm": 0.32,
|
319 |
-
"acc_norm_stderr": 0.046882617226215034
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.44485294117647056,
|
323 |
-
"acc_stderr": 0.030187532060329383,
|
324 |
-
"acc_norm": 0.44485294117647056,
|
325 |
-
"acc_norm_stderr": 0.030187532060329383
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.23265306122448978,
|
329 |
-
"acc_stderr": 0.02704925791589618,
|
330 |
-
"acc_norm": 0.23265306122448978,
|
331 |
-
"acc_norm_stderr": 0.02704925791589618
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.22784810126582278,
|
335 |
-
"acc_stderr": 0.02730348459906942,
|
336 |
-
"acc_norm": 0.22784810126582278,
|
337 |
-
"acc_norm_stderr": 0.02730348459906942
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.23728813559322035,
|
341 |
-
"acc_stderr": 0.010865436690780272,
|
342 |
-
"acc_norm": 0.23728813559322035,
|
343 |
-
"acc_norm_stderr": 0.010865436690780272
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.25,
|
347 |
-
"acc_stderr": 0.03039153369274154,
|
348 |
-
"acc_norm": 0.25,
|
349 |
-
"acc_norm_stderr": 0.03039153369274154
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.24848484848484848,
|
353 |
-
"acc_stderr": 0.03374402644139404,
|
354 |
-
"acc_norm": 0.24848484848484848,
|
355 |
-
"acc_norm_stderr": 0.03374402644139404
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2741738066095471,
|
359 |
-
"mc1_stderr": 0.01561651849721938,
|
360 |
-
"mc2": 0.5406294687690661,
|
361 |
-
"mc2_stderr": 0.016334114258114155
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.12441314553990611,
|
365 |
-
"acc_stderr": 0.011314046818595224,
|
366 |
-
"acc_norm": 0.3615023474178404,
|
367 |
-
"acc_norm_stderr": 0.01646912149043007
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "FINDA-FIT/llama-r",
|
436 |
-
"model_sha": "6bdde9a227da60c2db803024d5b2e3a53a41cf0b",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
FINDA-FIT/xllama-instruct/result_2023-10-01 07:23:53.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.3387372013651877,
|
5 |
-
"acc_stderr": 0.013830568927974334,
|
6 |
-
"acc_norm": 0.3924914675767918,
|
7 |
-
"acc_norm_stderr": 0.01426963463567071
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3910575582553276,
|
11 |
-
"acc_stderr": 0.004869899297734548,
|
12 |
-
"acc_norm": 0.5143397729535949,
|
13 |
-
"acc_norm_stderr": 0.004987728900897584
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.3684210526315789,
|
17 |
-
"acc_stderr": 0.036996580176568775,
|
18 |
-
"acc_norm": 0.3684210526315789,
|
19 |
-
"acc_norm_stderr": 0.036996580176568775
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.3106796116504854,
|
23 |
-
"acc_stderr": 0.04582124160161549,
|
24 |
-
"acc_norm": 0.3106796116504854,
|
25 |
-
"acc_norm_stderr": 0.04582124160161549
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.39208173690932313,
|
29 |
-
"acc_stderr": 0.01745852405014764,
|
30 |
-
"acc_norm": 0.39208173690932313,
|
31 |
-
"acc_norm_stderr": 0.01745852405014764
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.4074074074074074,
|
35 |
-
"acc_stderr": 0.042446332383532286,
|
36 |
-
"acc_norm": 0.4074074074074074,
|
37 |
-
"acc_norm_stderr": 0.042446332383532286
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.3,
|
41 |
-
"acc_stderr": 0.046056618647183814,
|
42 |
-
"acc_norm": 0.3,
|
43 |
-
"acc_norm_stderr": 0.046056618647183814
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2680851063829787,
|
47 |
-
"acc_stderr": 0.028957342788342343,
|
48 |
-
"acc_norm": 0.2680851063829787,
|
49 |
-
"acc_norm_stderr": 0.028957342788342343
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.3132530120481928,
|
53 |
-
"acc_stderr": 0.03610805018031024,
|
54 |
-
"acc_norm": 0.3132530120481928,
|
55 |
-
"acc_norm_stderr": 0.03610805018031024
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.43086816720257237,
|
59 |
-
"acc_stderr": 0.028125340983972714,
|
60 |
-
"acc_norm": 0.43086816720257237,
|
61 |
-
"acc_norm_stderr": 0.028125340983972714
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.36771300448430494,
|
65 |
-
"acc_stderr": 0.03236198350928275,
|
66 |
-
"acc_norm": 0.36771300448430494,
|
67 |
-
"acc_norm_stderr": 0.03236198350928275
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.3893129770992366,
|
71 |
-
"acc_stderr": 0.04276486542814591,
|
72 |
-
"acc_norm": 0.3893129770992366,
|
73 |
-
"acc_norm_stderr": 0.04276486542814591
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.33,
|
77 |
-
"acc_stderr": 0.047258156262526045,
|
78 |
-
"acc_norm": 0.33,
|
79 |
-
"acc_norm_stderr": 0.047258156262526045
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.35858585858585856,
|
83 |
-
"acc_stderr": 0.0341690364039152,
|
84 |
-
"acc_norm": 0.35858585858585856,
|
85 |
-
"acc_norm_stderr": 0.0341690364039152
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.3793103448275862,
|
89 |
-
"acc_stderr": 0.04043461861916747,
|
90 |
-
"acc_norm": 0.3793103448275862,
|
91 |
-
"acc_norm_stderr": 0.04043461861916747
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.10784313725490197,
|
95 |
-
"acc_stderr": 0.03086428212206014,
|
96 |
-
"acc_norm": 0.10784313725490197,
|
97 |
-
"acc_norm_stderr": 0.03086428212206014
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3235294117647059,
|
101 |
-
"acc_stderr": 0.030388353551886845,
|
102 |
-
"acc_norm": 0.3235294117647059,
|
103 |
-
"acc_norm_stderr": 0.030388353551886845
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.26666666666666666,
|
107 |
-
"acc_stderr": 0.0224212736129237,
|
108 |
-
"acc_norm": 0.26666666666666666,
|
109 |
-
"acc_norm_stderr": 0.0224212736129237
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.45,
|
113 |
-
"acc_stderr": 0.05,
|
114 |
-
"acc_norm": 0.45,
|
115 |
-
"acc_norm_stderr": 0.05
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.22,
|
119 |
-
"acc_stderr": 0.04163331998932269,
|
120 |
-
"acc_norm": 0.22,
|
121 |
-
"acc_norm_stderr": 0.04163331998932269
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.4074074074074074,
|
125 |
-
"acc_stderr": 0.047500773411999854,
|
126 |
-
"acc_norm": 0.4074074074074074,
|
127 |
-
"acc_norm_stderr": 0.047500773411999854
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.28078817733990147,
|
131 |
-
"acc_stderr": 0.03161856335358611,
|
132 |
-
"acc_norm": 0.28078817733990147,
|
133 |
-
"acc_norm_stderr": 0.03161856335358611
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3161290322580645,
|
137 |
-
"acc_stderr": 0.02645087448904277,
|
138 |
-
"acc_norm": 0.3161290322580645,
|
139 |
-
"acc_norm_stderr": 0.02645087448904277
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.4700854700854701,
|
143 |
-
"acc_stderr": 0.03269741106812443,
|
144 |
-
"acc_norm": 0.4700854700854701,
|
145 |
-
"acc_norm_stderr": 0.03269741106812443
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.33962264150943394,
|
149 |
-
"acc_stderr": 0.029146904747798345,
|
150 |
-
"acc_norm": 0.33962264150943394,
|
151 |
-
"acc_norm_stderr": 0.029146904747798345
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.3181818181818182,
|
155 |
-
"acc_stderr": 0.04461272175910507,
|
156 |
-
"acc_norm": 0.3181818181818182,
|
157 |
-
"acc_norm_stderr": 0.04461272175910507
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.2814814814814815,
|
161 |
-
"acc_stderr": 0.027420019350945277,
|
162 |
-
"acc_norm": 0.2814814814814815,
|
163 |
-
"acc_norm_stderr": 0.027420019350945277
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2781456953642384,
|
167 |
-
"acc_stderr": 0.03658603262763743,
|
168 |
-
"acc_norm": 0.2781456953642384,
|
169 |
-
"acc_norm_stderr": 0.03658603262763743
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.39800995024875624,
|
173 |
-
"acc_stderr": 0.034611994290400135,
|
174 |
-
"acc_norm": 0.39800995024875624,
|
175 |
-
"acc_norm_stderr": 0.034611994290400135
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3063583815028902,
|
179 |
-
"acc_stderr": 0.035149425512674394,
|
180 |
-
"acc_norm": 0.3063583815028902,
|
181 |
-
"acc_norm_stderr": 0.035149425512674394
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.2830687830687831,
|
185 |
-
"acc_stderr": 0.023201392938194978,
|
186 |
-
"acc_norm": 0.2830687830687831,
|
187 |
-
"acc_norm_stderr": 0.023201392938194978
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2638888888888889,
|
191 |
-
"acc_stderr": 0.03685651095897532,
|
192 |
-
"acc_norm": 0.2638888888888889,
|
193 |
-
"acc_norm_stderr": 0.03685651095897532
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.25,
|
197 |
-
"acc_stderr": 0.04351941398892446,
|
198 |
-
"acc_norm": 0.25,
|
199 |
-
"acc_norm_stderr": 0.04351941398892446
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.42,
|
203 |
-
"acc_stderr": 0.049604496374885836,
|
204 |
-
"acc_norm": 0.42,
|
205 |
-
"acc_norm_stderr": 0.049604496374885836
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.43641618497109824,
|
209 |
-
"acc_stderr": 0.026700545424943687,
|
210 |
-
"acc_norm": 0.43641618497109824,
|
211 |
-
"acc_norm_stderr": 0.026700545424943687
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.2883435582822086,
|
215 |
-
"acc_stderr": 0.03559039531617342,
|
216 |
-
"acc_norm": 0.2883435582822086,
|
217 |
-
"acc_norm_stderr": 0.03559039531617342
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.3950617283950617,
|
221 |
-
"acc_stderr": 0.027201117666925657,
|
222 |
-
"acc_norm": 0.3950617283950617,
|
223 |
-
"acc_norm_stderr": 0.027201117666925657
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.31,
|
227 |
-
"acc_stderr": 0.04648231987117316,
|
228 |
-
"acc_norm": 0.31,
|
229 |
-
"acc_norm_stderr": 0.04648231987117316
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.3160621761658031,
|
233 |
-
"acc_stderr": 0.033553973696861736,
|
234 |
-
"acc_norm": 0.3160621761658031,
|
235 |
-
"acc_norm_stderr": 0.033553973696861736
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2719298245614035,
|
239 |
-
"acc_stderr": 0.04185774424022057,
|
240 |
-
"acc_norm": 0.2719298245614035,
|
241 |
-
"acc_norm_stderr": 0.04185774424022057
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3779816513761468,
|
245 |
-
"acc_stderr": 0.020789187066728113,
|
246 |
-
"acc_norm": 0.3779816513761468,
|
247 |
-
"acc_norm_stderr": 0.020789187066728113
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.21428571428571427,
|
251 |
-
"acc_stderr": 0.03670066451047182,
|
252 |
-
"acc_norm": 0.21428571428571427,
|
253 |
-
"acc_norm_stderr": 0.03670066451047182
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.39869281045751637,
|
257 |
-
"acc_stderr": 0.02803609227389176,
|
258 |
-
"acc_norm": 0.39869281045751637,
|
259 |
-
"acc_norm_stderr": 0.02803609227389176
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.33,
|
263 |
-
"acc_stderr": 0.04725815626252605,
|
264 |
-
"acc_norm": 0.33,
|
265 |
-
"acc_norm_stderr": 0.04725815626252605
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5537190082644629,
|
269 |
-
"acc_stderr": 0.0453793517794788,
|
270 |
-
"acc_norm": 0.5537190082644629,
|
271 |
-
"acc_norm_stderr": 0.0453793517794788
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.3815789473684211,
|
275 |
-
"acc_stderr": 0.03953173377749194,
|
276 |
-
"acc_norm": 0.3815789473684211,
|
277 |
-
"acc_norm_stderr": 0.03953173377749194
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3104575163398693,
|
281 |
-
"acc_stderr": 0.01871806705262322,
|
282 |
-
"acc_norm": 0.3104575163398693,
|
283 |
-
"acc_norm_stderr": 0.01871806705262322
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.3191489361702128,
|
287 |
-
"acc_stderr": 0.027807990141320203,
|
288 |
-
"acc_norm": 0.3191489361702128,
|
289 |
-
"acc_norm_stderr": 0.027807990141320203
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.29464285714285715,
|
293 |
-
"acc_stderr": 0.0432704093257873,
|
294 |
-
"acc_norm": 0.29464285714285715,
|
295 |
-
"acc_norm_stderr": 0.0432704093257873
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3425925925925926,
|
299 |
-
"acc_stderr": 0.032365852526021574,
|
300 |
-
"acc_norm": 0.3425925925925926,
|
301 |
-
"acc_norm_stderr": 0.032365852526021574
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24134078212290502,
|
305 |
-
"acc_stderr": 0.014310999547961441,
|
306 |
-
"acc_norm": 0.24134078212290502,
|
307 |
-
"acc_norm_stderr": 0.014310999547961441
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.39,
|
311 |
-
"acc_stderr": 0.04902071300001975,
|
312 |
-
"acc_norm": 0.39,
|
313 |
-
"acc_norm_stderr": 0.04902071300001975
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.31,
|
317 |
-
"acc_stderr": 0.04648231987117316,
|
318 |
-
"acc_norm": 0.31,
|
319 |
-
"acc_norm_stderr": 0.04648231987117316
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.3235294117647059,
|
323 |
-
"acc_stderr": 0.028418208619406794,
|
324 |
-
"acc_norm": 0.3235294117647059,
|
325 |
-
"acc_norm_stderr": 0.028418208619406794
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.31020408163265306,
|
329 |
-
"acc_stderr": 0.029613459872484378,
|
330 |
-
"acc_norm": 0.31020408163265306,
|
331 |
-
"acc_norm_stderr": 0.029613459872484378
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.47257383966244726,
|
335 |
-
"acc_stderr": 0.032498227183013026,
|
336 |
-
"acc_norm": 0.47257383966244726,
|
337 |
-
"acc_norm_stderr": 0.032498227183013026
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2900912646675359,
|
341 |
-
"acc_stderr": 0.0115903755547331,
|
342 |
-
"acc_norm": 0.2900912646675359,
|
343 |
-
"acc_norm_stderr": 0.0115903755547331
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.29411764705882354,
|
347 |
-
"acc_stderr": 0.03198001660115072,
|
348 |
-
"acc_norm": 0.29411764705882354,
|
349 |
-
"acc_norm_stderr": 0.03198001660115072
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3696969696969697,
|
353 |
-
"acc_stderr": 0.037694303145125674,
|
354 |
-
"acc_norm": 0.3696969696969697,
|
355 |
-
"acc_norm_stderr": 0.037694303145125674
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.26560587515299877,
|
359 |
-
"mc1_stderr": 0.015461027627253597,
|
360 |
-
"mc2": 0.40727214174838056,
|
361 |
-
"mc2_stderr": 0.014940202090745085
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.3204225352112676,
|
365 |
-
"acc_stderr": 0.015996178088626918,
|
366 |
-
"acc_norm": 0.4307511737089202,
|
367 |
-
"acc_norm_stderr": 0.016974599121731444
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "FINDA-FIT/xllama-instruct",
|
436 |
-
"model_sha": "1e5ee340d5f4558a1bc451ba7942fa5f3a1c8d80",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.3728668941979522,
|
5 |
-
"acc_stderr": 0.01413117676013117,
|
6 |
-
"acc_norm": 0.42406143344709896,
|
7 |
-
"acc_norm_stderr": 0.014441889627464394
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.40689105755825533,
|
11 |
-
"acc_stderr": 0.004902502514738606,
|
12 |
-
"acc_norm": 0.5433180641306513,
|
13 |
-
"acc_norm_stderr": 0.004971019942726589
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.5146198830409356,
|
17 |
-
"acc_stderr": 0.038331852752130254,
|
18 |
-
"acc_norm": 0.5146198830409356,
|
19 |
-
"acc_norm_stderr": 0.038331852752130254
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.44660194174757284,
|
23 |
-
"acc_stderr": 0.04922424153458933,
|
24 |
-
"acc_norm": 0.44660194174757284,
|
25 |
-
"acc_norm_stderr": 0.04922424153458933
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.4878671775223499,
|
29 |
-
"acc_stderr": 0.01787469866749135,
|
30 |
-
"acc_norm": 0.4878671775223499,
|
31 |
-
"acc_norm_stderr": 0.01787469866749135
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.362962962962963,
|
35 |
-
"acc_stderr": 0.041539484047424,
|
36 |
-
"acc_norm": 0.362962962962963,
|
37 |
-
"acc_norm_stderr": 0.041539484047424
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.29,
|
41 |
-
"acc_stderr": 0.045604802157206824,
|
42 |
-
"acc_norm": 0.29,
|
43 |
-
"acc_norm_stderr": 0.045604802157206824
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.3148936170212766,
|
47 |
-
"acc_stderr": 0.030363582197238167,
|
48 |
-
"acc_norm": 0.3148936170212766,
|
49 |
-
"acc_norm_stderr": 0.030363582197238167
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.4397590361445783,
|
53 |
-
"acc_stderr": 0.03864139923699122,
|
54 |
-
"acc_norm": 0.4397590361445783,
|
55 |
-
"acc_norm_stderr": 0.03864139923699122
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.4694533762057878,
|
59 |
-
"acc_stderr": 0.028345045864840678,
|
60 |
-
"acc_norm": 0.4694533762057878,
|
61 |
-
"acc_norm_stderr": 0.028345045864840678
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3721973094170404,
|
65 |
-
"acc_stderr": 0.03244305283008731,
|
66 |
-
"acc_norm": 0.3721973094170404,
|
67 |
-
"acc_norm_stderr": 0.03244305283008731
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.48091603053435117,
|
71 |
-
"acc_stderr": 0.04382094705550989,
|
72 |
-
"acc_norm": 0.48091603053435117,
|
73 |
-
"acc_norm_stderr": 0.04382094705550989
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.29,
|
77 |
-
"acc_stderr": 0.04560480215720683,
|
78 |
-
"acc_norm": 0.29,
|
79 |
-
"acc_norm_stderr": 0.04560480215720683
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.5151515151515151,
|
83 |
-
"acc_stderr": 0.0356071651653106,
|
84 |
-
"acc_norm": 0.5151515151515151,
|
85 |
-
"acc_norm_stderr": 0.0356071651653106
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.3724137931034483,
|
89 |
-
"acc_stderr": 0.0402873153294756,
|
90 |
-
"acc_norm": 0.3724137931034483,
|
91 |
-
"acc_norm_stderr": 0.0402873153294756
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.21568627450980393,
|
95 |
-
"acc_stderr": 0.04092563958237655,
|
96 |
-
"acc_norm": 0.21568627450980393,
|
97 |
-
"acc_norm_stderr": 0.04092563958237655
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3697478991596639,
|
101 |
-
"acc_stderr": 0.03135709599613591,
|
102 |
-
"acc_norm": 0.3697478991596639,
|
103 |
-
"acc_norm_stderr": 0.03135709599613591
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.358974358974359,
|
107 |
-
"acc_stderr": 0.02432173848460237,
|
108 |
-
"acc_norm": 0.358974358974359,
|
109 |
-
"acc_norm_stderr": 0.02432173848460237
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.44,
|
113 |
-
"acc_stderr": 0.04988876515698589,
|
114 |
-
"acc_norm": 0.44,
|
115 |
-
"acc_norm_stderr": 0.04988876515698589
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.33,
|
119 |
-
"acc_stderr": 0.047258156262526045,
|
120 |
-
"acc_norm": 0.33,
|
121 |
-
"acc_norm_stderr": 0.047258156262526045
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.4166666666666667,
|
125 |
-
"acc_stderr": 0.04766075165356461,
|
126 |
-
"acc_norm": 0.4166666666666667,
|
127 |
-
"acc_norm_stderr": 0.04766075165356461
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.35467980295566504,
|
131 |
-
"acc_stderr": 0.03366124489051449,
|
132 |
-
"acc_norm": 0.35467980295566504,
|
133 |
-
"acc_norm_stderr": 0.03366124489051449
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.45483870967741935,
|
137 |
-
"acc_stderr": 0.028327743091561053,
|
138 |
-
"acc_norm": 0.45483870967741935,
|
139 |
-
"acc_norm_stderr": 0.028327743091561053
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.5683760683760684,
|
143 |
-
"acc_stderr": 0.0324483553531149,
|
144 |
-
"acc_norm": 0.5683760683760684,
|
145 |
-
"acc_norm_stderr": 0.0324483553531149
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.4528301886792453,
|
149 |
-
"acc_stderr": 0.03063562795796182,
|
150 |
-
"acc_norm": 0.4528301886792453,
|
151 |
-
"acc_norm_stderr": 0.03063562795796182
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.4727272727272727,
|
155 |
-
"acc_stderr": 0.04782001791380063,
|
156 |
-
"acc_norm": 0.4727272727272727,
|
157 |
-
"acc_norm_stderr": 0.04782001791380063
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.26666666666666666,
|
161 |
-
"acc_stderr": 0.026962424325073828,
|
162 |
-
"acc_norm": 0.26666666666666666,
|
163 |
-
"acc_norm_stderr": 0.026962424325073828
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2847682119205298,
|
167 |
-
"acc_stderr": 0.03684881521389023,
|
168 |
-
"acc_norm": 0.2847682119205298,
|
169 |
-
"acc_norm_stderr": 0.03684881521389023
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.5472636815920398,
|
173 |
-
"acc_stderr": 0.03519702717576915,
|
174 |
-
"acc_norm": 0.5472636815920398,
|
175 |
-
"acc_norm_stderr": 0.03519702717576915
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3583815028901734,
|
179 |
-
"acc_stderr": 0.03656343653353159,
|
180 |
-
"acc_norm": 0.3583815028901734,
|
181 |
-
"acc_norm_stderr": 0.03656343653353159
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.30423280423280424,
|
185 |
-
"acc_stderr": 0.023695415009463084,
|
186 |
-
"acc_norm": 0.30423280423280424,
|
187 |
-
"acc_norm_stderr": 0.023695415009463084
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3055555555555556,
|
191 |
-
"acc_stderr": 0.03852084696008534,
|
192 |
-
"acc_norm": 0.3055555555555556,
|
193 |
-
"acc_norm_stderr": 0.03852084696008534
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.042923469599092816,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.042923469599092816
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.54,
|
203 |
-
"acc_stderr": 0.05009082659620333,
|
204 |
-
"acc_norm": 0.54,
|
205 |
-
"acc_norm_stderr": 0.05009082659620333
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.4190751445086705,
|
209 |
-
"acc_stderr": 0.02656417811142262,
|
210 |
-
"acc_norm": 0.4190751445086705,
|
211 |
-
"acc_norm_stderr": 0.02656417811142262
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.34355828220858897,
|
215 |
-
"acc_stderr": 0.03731133519673893,
|
216 |
-
"acc_norm": 0.34355828220858897,
|
217 |
-
"acc_norm_stderr": 0.03731133519673893
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.4660493827160494,
|
221 |
-
"acc_stderr": 0.027756535257347666,
|
222 |
-
"acc_norm": 0.4660493827160494,
|
223 |
-
"acc_norm_stderr": 0.027756535257347666
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.26,
|
227 |
-
"acc_stderr": 0.04408440022768079,
|
228 |
-
"acc_norm": 0.26,
|
229 |
-
"acc_norm_stderr": 0.04408440022768079
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.40932642487046633,
|
233 |
-
"acc_stderr": 0.03548608168860806,
|
234 |
-
"acc_norm": 0.40932642487046633,
|
235 |
-
"acc_norm_stderr": 0.03548608168860806
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2543859649122807,
|
239 |
-
"acc_stderr": 0.040969851398436716,
|
240 |
-
"acc_norm": 0.2543859649122807,
|
241 |
-
"acc_norm_stderr": 0.040969851398436716
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.44954128440366975,
|
245 |
-
"acc_stderr": 0.021327881417823363,
|
246 |
-
"acc_norm": 0.44954128440366975,
|
247 |
-
"acc_norm_stderr": 0.021327881417823363
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.25396825396825395,
|
251 |
-
"acc_stderr": 0.03893259610604675,
|
252 |
-
"acc_norm": 0.25396825396825395,
|
253 |
-
"acc_norm_stderr": 0.03893259610604675
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.38235294117647056,
|
257 |
-
"acc_stderr": 0.027826109307283693,
|
258 |
-
"acc_norm": 0.38235294117647056,
|
259 |
-
"acc_norm_stderr": 0.027826109307283693
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.37,
|
263 |
-
"acc_stderr": 0.048523658709391,
|
264 |
-
"acc_norm": 0.37,
|
265 |
-
"acc_norm_stderr": 0.048523658709391
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5206611570247934,
|
269 |
-
"acc_stderr": 0.04560456086387235,
|
270 |
-
"acc_norm": 0.5206611570247934,
|
271 |
-
"acc_norm_stderr": 0.04560456086387235
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.40131578947368424,
|
275 |
-
"acc_stderr": 0.03988903703336284,
|
276 |
-
"acc_norm": 0.40131578947368424,
|
277 |
-
"acc_norm_stderr": 0.03988903703336284
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3137254901960784,
|
281 |
-
"acc_stderr": 0.018771683893528176,
|
282 |
-
"acc_norm": 0.3137254901960784,
|
283 |
-
"acc_norm_stderr": 0.018771683893528176
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2907801418439716,
|
287 |
-
"acc_stderr": 0.027090664368353178,
|
288 |
-
"acc_norm": 0.2907801418439716,
|
289 |
-
"acc_norm_stderr": 0.027090664368353178
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.22321428571428573,
|
293 |
-
"acc_stderr": 0.039523019677025116,
|
294 |
-
"acc_norm": 0.22321428571428573,
|
295 |
-
"acc_norm_stderr": 0.039523019677025116
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.2916666666666667,
|
299 |
-
"acc_stderr": 0.03099866630456053,
|
300 |
-
"acc_norm": 0.2916666666666667,
|
301 |
-
"acc_norm_stderr": 0.03099866630456053
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.2424581005586592,
|
305 |
-
"acc_stderr": 0.01433352205921789,
|
306 |
-
"acc_norm": 0.2424581005586592,
|
307 |
-
"acc_norm_stderr": 0.01433352205921789
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.27,
|
311 |
-
"acc_stderr": 0.044619604333847394,
|
312 |
-
"acc_norm": 0.27,
|
313 |
-
"acc_norm_stderr": 0.044619604333847394
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.39,
|
317 |
-
"acc_stderr": 0.04902071300001975,
|
318 |
-
"acc_norm": 0.39,
|
319 |
-
"acc_norm_stderr": 0.04902071300001975
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.20220588235294118,
|
323 |
-
"acc_stderr": 0.02439819298665492,
|
324 |
-
"acc_norm": 0.20220588235294118,
|
325 |
-
"acc_norm_stderr": 0.02439819298665492
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.43673469387755104,
|
329 |
-
"acc_stderr": 0.031751952375833226,
|
330 |
-
"acc_norm": 0.43673469387755104,
|
331 |
-
"acc_norm_stderr": 0.031751952375833226
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.4810126582278481,
|
335 |
-
"acc_stderr": 0.03252375148090448,
|
336 |
-
"acc_norm": 0.4810126582278481,
|
337 |
-
"acc_norm_stderr": 0.03252375148090448
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.29726205997392435,
|
341 |
-
"acc_stderr": 0.011673346173086034,
|
342 |
-
"acc_norm": 0.29726205997392435,
|
343 |
-
"acc_norm_stderr": 0.011673346173086034
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.36764705882352944,
|
347 |
-
"acc_stderr": 0.03384132045674118,
|
348 |
-
"acc_norm": 0.36764705882352944,
|
349 |
-
"acc_norm_stderr": 0.03384132045674118
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.46060606060606063,
|
353 |
-
"acc_stderr": 0.03892207016552013,
|
354 |
-
"acc_norm": 0.46060606060606063,
|
355 |
-
"acc_norm_stderr": 0.03892207016552013
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2558139534883721,
|
359 |
-
"mc1_stderr": 0.015274176219283347,
|
360 |
-
"mc2": 0.41687077666896594,
|
361 |
-
"mc2_stderr": 0.014804732810744745
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5363849765258216,
|
365 |
-
"acc_stderr": 0.017094337456326263,
|
366 |
-
"acc_norm": 0.6373239436619719,
|
367 |
-
"acc_norm_stderr": 0.016480666823965075
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v3",
|
436 |
-
"model_sha": "da615711850b1e6c1deb1a9c8dab9476a19df855",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.28242320819112626,
|
5 |
-
"acc_stderr": 0.01315545688409722,
|
6 |
-
"acc_norm": 0.3302047781569966,
|
7 |
-
"acc_norm_stderr": 0.013743085603760422
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3719378609838678,
|
11 |
-
"acc_stderr": 0.004823341569605419,
|
12 |
-
"acc_norm": 0.4821748655646286,
|
13 |
-
"acc_norm_stderr": 0.0049866095427490405
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.17543859649122806,
|
17 |
-
"acc_stderr": 0.029170885500727665,
|
18 |
-
"acc_norm": 0.17543859649122806,
|
19 |
-
"acc_norm_stderr": 0.029170885500727665
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.3592233009708738,
|
23 |
-
"acc_stderr": 0.04750458399041693,
|
24 |
-
"acc_norm": 0.3592233009708738,
|
25 |
-
"acc_norm_stderr": 0.04750458399041693
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.20051085568326948,
|
29 |
-
"acc_stderr": 0.014317653708594209,
|
30 |
-
"acc_norm": 0.20051085568326948,
|
31 |
-
"acc_norm_stderr": 0.014317653708594209
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.22962962962962963,
|
35 |
-
"acc_stderr": 0.036333844140734636,
|
36 |
-
"acc_norm": 0.22962962962962963,
|
37 |
-
"acc_norm_stderr": 0.036333844140734636
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.23,
|
41 |
-
"acc_stderr": 0.04229525846816508,
|
42 |
-
"acc_norm": 0.23,
|
43 |
-
"acc_norm_stderr": 0.04229525846816508
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.16170212765957448,
|
47 |
-
"acc_stderr": 0.02406850528969531,
|
48 |
-
"acc_norm": 0.16170212765957448,
|
49 |
-
"acc_norm_stderr": 0.02406850528969531
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.1927710843373494,
|
53 |
-
"acc_stderr": 0.030709824050565264,
|
54 |
-
"acc_norm": 0.1927710843373494,
|
55 |
-
"acc_norm_stderr": 0.030709824050565264
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.24115755627009647,
|
59 |
-
"acc_stderr": 0.024296594034763426,
|
60 |
-
"acc_norm": 0.24115755627009647,
|
61 |
-
"acc_norm_stderr": 0.024296594034763426
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.11659192825112108,
|
65 |
-
"acc_stderr": 0.021539639816244467,
|
66 |
-
"acc_norm": 0.11659192825112108,
|
67 |
-
"acc_norm_stderr": 0.021539639816244467
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.2595419847328244,
|
71 |
-
"acc_stderr": 0.03844876139785271,
|
72 |
-
"acc_norm": 0.2595419847328244,
|
73 |
-
"acc_norm_stderr": 0.03844876139785271
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.25,
|
77 |
-
"acc_stderr": 0.04351941398892446,
|
78 |
-
"acc_norm": 0.25,
|
79 |
-
"acc_norm_stderr": 0.04351941398892446
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.35353535353535354,
|
83 |
-
"acc_stderr": 0.03406086723547153,
|
84 |
-
"acc_norm": 0.35353535353535354,
|
85 |
-
"acc_norm_stderr": 0.03406086723547153
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.2413793103448276,
|
89 |
-
"acc_stderr": 0.03565998174135302,
|
90 |
-
"acc_norm": 0.2413793103448276,
|
91 |
-
"acc_norm_stderr": 0.03565998174135302
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.3627450980392157,
|
95 |
-
"acc_stderr": 0.04784060704105653,
|
96 |
-
"acc_norm": 0.3627450980392157,
|
97 |
-
"acc_norm_stderr": 0.04784060704105653
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3487394957983193,
|
101 |
-
"acc_stderr": 0.030956636328566548,
|
102 |
-
"acc_norm": 0.3487394957983193,
|
103 |
-
"acc_norm_stderr": 0.030956636328566548
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.36153846153846153,
|
107 |
-
"acc_stderr": 0.024359581465396983,
|
108 |
-
"acc_norm": 0.36153846153846153,
|
109 |
-
"acc_norm_stderr": 0.024359581465396983
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.19,
|
113 |
-
"acc_stderr": 0.03942772444036624,
|
114 |
-
"acc_norm": 0.19,
|
115 |
-
"acc_norm_stderr": 0.03942772444036624
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.16,
|
119 |
-
"acc_stderr": 0.0368452949177471,
|
120 |
-
"acc_norm": 0.16,
|
121 |
-
"acc_norm_stderr": 0.0368452949177471
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.21296296296296297,
|
125 |
-
"acc_stderr": 0.03957835471980981,
|
126 |
-
"acc_norm": 0.21296296296296297,
|
127 |
-
"acc_norm_stderr": 0.03957835471980981
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.28078817733990147,
|
131 |
-
"acc_stderr": 0.0316185633535861,
|
132 |
-
"acc_norm": 0.28078817733990147,
|
133 |
-
"acc_norm_stderr": 0.0316185633535861
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3161290322580645,
|
137 |
-
"acc_stderr": 0.026450874489042764,
|
138 |
-
"acc_norm": 0.3161290322580645,
|
139 |
-
"acc_norm_stderr": 0.026450874489042764
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.19658119658119658,
|
143 |
-
"acc_stderr": 0.02603538609895129,
|
144 |
-
"acc_norm": 0.19658119658119658,
|
145 |
-
"acc_norm_stderr": 0.02603538609895129
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.3018867924528302,
|
149 |
-
"acc_stderr": 0.02825420034443866,
|
150 |
-
"acc_norm": 0.3018867924528302,
|
151 |
-
"acc_norm_stderr": 0.02825420034443866
|
152 |
-
},
|
    "harness|ko_mmlu_public_relations|5": {"acc": 0.22727272727272727, "acc_stderr": 0.040139645540727735, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696},
    "harness|ko_mmlu_sociology|5": {"acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2724867724867725, "acc_stderr": 0.02293097307163334, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.02293097307163334},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2138728323699422, "acc_stderr": 0.022075709251757173, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.022075709251757173},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.2331288343558282, "acc_stderr": 0.0332201579577674, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.0332201579577674},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.22530864197530864, "acc_stderr": 0.02324620264781975, "acc_norm": 0.22530864197530864, "acc_norm_stderr": 0.02324620264781975},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.36787564766839376, "acc_stderr": 0.034801756684660366, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.034801756684660366},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.344954128440367, "acc_stderr": 0.02038060540506697, "acc_norm": 0.344954128440367, "acc_norm_stderr": 0.02038060540506697},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.29411764705882354, "acc_stderr": 0.02609016250427905, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.02609016250427905},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506},
    "harness|ko_mmlu_international_law|5": {"acc": 0.15702479338842976, "acc_stderr": 0.03321244842547128, "acc_norm": 0.15702479338842976, "acc_norm_stderr": 0.03321244842547128},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.3355263157894737, "acc_stderr": 0.038424985593952694, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.2173202614379085, "acc_stderr": 0.016684820929148598, "acc_norm": 0.2173202614379085, "acc_norm_stderr": 0.016684820929148598},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.24113475177304963, "acc_stderr": 0.02551873104953776, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.02551873104953776},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.14285714285714285, "acc_stderr": 0.033213611069662696, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.033213611069662696},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.2, "acc_stderr": 0.04020151261036847, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036847},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.3183673469387755, "acc_stderr": 0.02982253379398209, "acc_norm": 0.3183673469387755, "acc_norm_stderr": 0.02982253379398209},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.20675105485232068, "acc_stderr": 0.026361651668389087, "acc_norm": 0.20675105485232068, "acc_norm_stderr": 0.026361651668389087},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.2470664928292047, "acc_stderr": 0.011015752255279329, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279329},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.2727272727272727, "acc_stderr": 0.03477691162163659, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03477691162163659},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156475, "mc2": 0.3974526680083883, "mc2_stderr": 0.01475058288914894},
    "harness|ko_commongen_v2|2": {"acc": 0.18779342723004694, "acc_stderr": 0.013387782981513264, "acc_norm": 0.23943661971830985, "acc_norm_stderr": 0.014628446638821336}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "HAERAE-HUB/hae-tae_v0.1.1", "model_sha": "4ae77d9659bb11f158180f4b8b243d1e9ddb51f4", "model_dtype": "torch.float16", "lighteval_sha": "",
    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
  }
}
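Each of the removed result files follows the same layout as the JSON above: a `results` object keyed by harness task, a `versions` object with per-task version numbers, and a `config_general` block describing the evaluated model. Below is a minimal sketch of how such a file could be read and macro-averaged; the `result.json` path is a placeholder, and only the Python standard library is assumed.

```python
import json
from statistics import mean

# Placeholder path: any of the removed result files shares this schema.
path = "result.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Prefer normalized accuracy; the ko_truthfulqa_mc entry reports mc1/mc2 instead of acc.
scores = {}
for task, metrics in data["results"].items():
    if "acc_norm" in metrics:
        scores[task] = metrics["acc_norm"]
    elif "mc2" in metrics:
        scores[task] = metrics["mc2"]

model = data["config_general"]["model_name"]
print(f"{model}: macro-average over {len(scores)} tasks = {mean(scores.values()):.4f}")
```

The fallback to `mc2` simply covers the one task in these files that does not expose `acc`/`acc_norm`; how the leaderboard itself weights the tasks is not specified here.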
HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.2909556313993174, "acc_stderr": 0.01327307786590758, "acc_norm": 0.3302047781569966, "acc_norm_stderr": 0.013743085603760427},
    "harness|ko_hellaswag|10": {"acc": 0.37442740489942244, "acc_stderr": 0.004829856058603579, "acc_norm": 0.481876120294762, "acc_norm_stderr": 0.00498650229693118},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.17543859649122806, "acc_stderr": 0.029170885500727665, "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.029170885500727665},
    "harness|ko_mmlu_management|5": {"acc": 0.3786407766990291, "acc_stderr": 0.04802694698258973, "acc_norm": 0.3786407766990291, "acc_norm_stderr": 0.04802694698258973},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.20434227330779056, "acc_stderr": 0.014419123980931906, "acc_norm": 0.20434227330779056, "acc_norm_stderr": 0.014419123980931906},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.21481481481481482, "acc_stderr": 0.03547854198560826, "acc_norm": 0.21481481481481482, "acc_norm_stderr": 0.03547854198560826},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.15, "acc_stderr": 0.035887028128263714, "acc_norm": 0.15, "acc_norm_stderr": 0.035887028128263714},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.20851063829787234, "acc_stderr": 0.026556982117838746, "acc_norm": 0.20851063829787234, "acc_norm_stderr": 0.026556982117838746},
    "harness|ko_mmlu_virology|5": {"acc": 0.1927710843373494, "acc_stderr": 0.030709824050565264, "acc_norm": 0.1927710843373494, "acc_norm_stderr": 0.030709824050565264},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.24115755627009647, "acc_stderr": 0.024296594034763426, "acc_norm": 0.24115755627009647, "acc_norm_stderr": 0.024296594034763426},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.1031390134529148, "acc_stderr": 0.020412564289839272, "acc_norm": 0.1031390134529148, "acc_norm_stderr": 0.020412564289839272},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.2900763358778626, "acc_stderr": 0.03980066246467765, "acc_norm": 0.2900763358778626, "acc_norm_stderr": 0.03980066246467765},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.3434343434343434, "acc_stderr": 0.03383201223244441, "acc_norm": 0.3434343434343434, "acc_norm_stderr": 0.03383201223244441},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.3487394957983193, "acc_stderr": 0.030956636328566548, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.030956636328566548},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.3641025641025641, "acc_stderr": 0.024396672985094778, "acc_norm": 0.3641025641025641, "acc_norm_stderr": 0.024396672985094778},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.21296296296296297, "acc_stderr": 0.03957835471980981, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.03957835471980981},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3161290322580645, "acc_stderr": 0.026450874489042764, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.026450874489042764},
    "harness|ko_mmlu_marketing|5": {"acc": 0.19230769230769232, "acc_stderr": 0.025819233256483724, "acc_norm": 0.19230769230769232, "acc_norm_stderr": 0.025819233256483724},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.2943396226415094, "acc_stderr": 0.028049186315695248, "acc_norm": 0.2943396226415094, "acc_norm_stderr": 0.028049186315695248},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.22727272727272727, "acc_stderr": 0.040139645540727735, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.2518518518518518, "acc_stderr": 0.026466117538959905, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959905},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684},
    "harness|ko_mmlu_sociology|5": {"acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3352601156069364, "acc_stderr": 0.03599586301247078, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247078},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2619047619047619, "acc_stderr": 0.022644212615525214, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525214},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.2708333333333333, "acc_stderr": 0.03716177437566017, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566017},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2138728323699422, "acc_stderr": 0.022075709251757173, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.022075709251757173},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.2331288343558282, "acc_stderr": 0.0332201579577674, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.0332201579577674},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.22530864197530864, "acc_stderr": 0.02324620264781975, "acc_norm": 0.22530864197530864, "acc_norm_stderr": 0.02324620264781975},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.36787564766839376, "acc_stderr": 0.034801756684660366, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.034801756684660366},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.344954128440367, "acc_stderr": 0.02038060540506697, "acc_norm": 0.344954128440367, "acc_norm_stderr": 0.02038060540506697},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.29411764705882354, "acc_stderr": 0.026090162504279053, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.026090162504279053},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "harness|ko_mmlu_international_law|5": {"acc": 0.14049586776859505, "acc_stderr": 0.031722334260021585, "acc_norm": 0.14049586776859505, "acc_norm_stderr": 0.031722334260021585},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.3355263157894737, "acc_stderr": 0.038424985593952694, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.2222222222222222, "acc_stderr": 0.016819028375736386, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.016819028375736386},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.24468085106382978, "acc_stderr": 0.02564555362226673, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.02564555362226673},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.16071428571428573, "acc_stderr": 0.034859460964757394, "acc_norm": 0.16071428571428573, "acc_norm_stderr": 0.034859460964757394},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.4675925925925926, "acc_stderr": 0.03402801581358966, "acc_norm": 0.4675925925925926, "acc_norm_stderr": 0.03402801581358966},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.3346938775510204, "acc_stderr": 0.030209235226242314, "acc_norm": 0.3346938775510204, "acc_norm_stderr": 0.030209235226242314},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.19831223628691982, "acc_stderr": 0.02595502084162111, "acc_norm": 0.19831223628691982, "acc_norm_stderr": 0.02595502084162111},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.24902216427640156, "acc_stderr": 0.01104489226404077, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.01104489226404077},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.27050183598531213, "mc1_stderr": 0.015550778332842885, "mc2": 0.420854027075679, "mc2_stderr": 0.014933313137954875},
    "harness|ko_commongen_v2|2": {"acc": 0.284037558685446, "acc_stderr": 0.01545853115904392, "acc_norm": 0.3474178403755869, "acc_norm_stderr": 0.016322206819108925}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "HAERAE-HUB/hae-tae_v0.1.2", "model_sha": "fd9094c0e91bcb07ecf2b89b36a16480e27a93dc", "model_dtype": "torch.float16", "lighteval_sha": "",
    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
  }
}
HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.36945392491467577, "acc_stderr": 0.014104578366491888, "acc_norm": 0.42150170648464164, "acc_norm_stderr": 0.014430197069326028},
    "harness|ko_hellaswag|10": {"acc": 0.40450109539932283, "acc_stderr": 0.004897921845492105, "acc_norm": 0.5392352121091416, "acc_norm_stderr": 0.004974395131539592},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205},
    "harness|ko_mmlu_management|5": {"acc": 0.5533980582524272, "acc_stderr": 0.04922424153458934, "acc_norm": 0.5533980582524272, "acc_norm_stderr": 0.04922424153458934},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5070242656449553, "acc_stderr": 0.017878199003432214, "acc_norm": 0.5070242656449553, "acc_norm_stderr": 0.017878199003432214},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3404255319148936, "acc_stderr": 0.03097669299853443, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.03097669299853443},
    "harness|ko_mmlu_virology|5": {"acc": 0.3614457831325301, "acc_stderr": 0.037400593820293204, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.037400593820293204},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.4983922829581994, "acc_stderr": 0.02839794490780661, "acc_norm": 0.4983922829581994, "acc_norm_stderr": 0.02839794490780661},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.4977578475336323, "acc_stderr": 0.03355746535223263, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.03355746535223263},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.44274809160305345, "acc_stderr": 0.0435644720266507, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.0435644720266507},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5505050505050505, "acc_stderr": 0.0354413249194797, "acc_norm": 0.5505050505050505, "acc_norm_stderr": 0.0354413249194797},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.453781512605042, "acc_stderr": 0.032339434681820885, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.032339434681820885},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4230769230769231, "acc_stderr": 0.025049197876042335, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.025049197876042335},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.46296296296296297, "acc_stderr": 0.04820403072760626, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.04820403072760626},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.37438423645320196, "acc_stderr": 0.034051553805619514, "acc_norm": 0.37438423645320196, "acc_norm_stderr": 0.034051553805619514},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.43548387096774194, "acc_stderr": 0.028206225591502744, "acc_norm": 0.43548387096774194, "acc_norm_stderr": 0.028206225591502744},
    "harness|ko_mmlu_marketing|5": {"acc": 0.6452991452991453, "acc_stderr": 0.03134250486245402, "acc_norm": 0.6452991452991453, "acc_norm_stderr": 0.03134250486245402},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4226415094339623, "acc_stderr": 0.03040233144576954, "acc_norm": 0.4226415094339623, "acc_norm_stderr": 0.03040233144576954},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3148148148148148, "acc_stderr": 0.028317533496066475, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066475},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343},
    "harness|ko_mmlu_sociology|5": {"acc": 0.5522388059701493, "acc_stderr": 0.035161847729521675, "acc_norm": 0.5522388059701493, "acc_norm_stderr": 0.035161847729521675},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3930635838150289, "acc_stderr": 0.0372424959581773, "acc_norm": 0.3930635838150289, "acc_norm_stderr": 0.0372424959581773},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.37037037037037035, "acc_stderr": 0.02487081525105709, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02487081525105709},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.3611111111111111, "acc_stderr": 0.04016660030451233, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.04016660030451233},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.4595375722543353, "acc_stderr": 0.02683080599895224, "acc_norm": 0.4595375722543353, "acc_norm_stderr": 0.02683080599895224},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4539877300613497, "acc_stderr": 0.0391170190467718, "acc_norm": 0.4539877300613497, "acc_norm_stderr": 0.0391170190467718},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.4722222222222222, "acc_stderr": 0.027777777777777797, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777797},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.48704663212435234, "acc_stderr": 0.03607228061047749, "acc_norm": 0.48704663212435234, "acc_norm_stderr": 0.03607228061047749},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.21052631578947367, "acc_stderr": 0.0383515395439942, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0383515395439942},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5412844036697247, "acc_stderr": 0.021364122533881695, "acc_norm": 0.5412844036697247, "acc_norm_stderr": 0.021364122533881695},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.42810457516339867, "acc_stderr": 0.028332397483664278, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.028332397483664278},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919},
    "harness|ko_mmlu_international_law|5": {"acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.4407894736842105, "acc_stderr": 0.04040311062490436, "acc_norm": 0.4407894736842105, "acc_norm_stderr": 0.04040311062490436},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3660130718954248, "acc_stderr": 0.019488025745529675, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.019488025745529675},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.33687943262411346, "acc_stderr": 0.02819553487396673, "acc_norm": 0.33687943262411346, "acc_norm_stderr": 0.02819553487396673},
    "harness|ko_mmlu_machine_learning|5": {"acc": 0.2857142857142857, "acc_stderr": 0.042878587513404544, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404544},
    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894},
    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24804469273743016, "acc_stderr": 0.014444157808261453, "acc_norm": 0.24804469273743016, "acc_norm_stderr": 0.014444157808261453},
    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589},
    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333},
    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.33455882352941174, "acc_stderr": 0.02866199620233531, "acc_norm": 0.33455882352941174, "acc_norm_stderr": 0.02866199620233531},
    "harness|ko_mmlu_security_studies|5": {"acc": 0.4775510204081633, "acc_stderr": 0.03197694118713673, "acc_norm": 0.4775510204081633, "acc_norm_stderr": 0.03197694118713673},
    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.5654008438818565, "acc_stderr": 0.03226759995510145, "acc_norm": 0.5654008438818565, "acc_norm_stderr": 0.03226759995510145},
    "harness|ko_mmlu_professional_law|5": {"acc": 0.3559322033898305, "acc_stderr": 0.012228645537277573, "acc_norm": 0.3559322033898305, "acc_norm_stderr": 0.012228645537277573},
    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.45588235294117646, "acc_stderr": 0.03495624522015474, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015474},
    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.503030303030303, "acc_stderr": 0.03904272341431855, "acc_norm": 0.503030303030303, "acc_norm_stderr": 0.03904272341431855},
    "harness|ko_truthfulqa_mc|0": {"mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775527, "mc2": 0.4355517094226067, "mc2_stderr": 0.015309009273280678},
    "harness|ko_commongen_v2|2": {"acc": 0.5915492957746479, "acc_stderr": 0.016850023674109642, "acc_norm": 0.6854460093896714, "acc_norm_stderr": 0.015917301615490653}
  },
  "versions": {
    "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "HumanF-MarkrAI/pub-llama-13B-v3", "model_sha": "a077b211925e00e7bd8e3f6bdf29476c59b81d6d", "model_dtype": "torch.float16", "lighteval_sha": "",
    "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null
  }
}
HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json
DELETED
@@ -1,444 +0,0 @@
{
  "results": {
    "harness|ko_arc_challenge|25": {"acc": 0.3677474402730375, "acc_stderr": 0.01409099561816849, "acc_norm": 0.41552901023890787, "acc_norm_stderr": 0.01440136664121639},
    "harness|ko_hellaswag|10": {"acc": 0.40579565823541125, "acc_stderr": 0.004900417982582061, "acc_norm": 0.5321649073889664, "acc_norm_stderr": 0.004979446038824757},
    "harness|ko_mmlu_world_religions|5": {"acc": 0.4678362573099415, "acc_stderr": 0.03826882417660369, "acc_norm": 0.4678362573099415, "acc_norm_stderr": 0.03826882417660369},
    "harness|ko_mmlu_management|5": {"acc": 0.5825242718446602, "acc_stderr": 0.04882840548212238, "acc_norm": 0.5825242718446602, "acc_norm_stderr": 0.04882840548212238},
    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.5006385696040868, "acc_stderr": 0.01787994891443168, "acc_norm": 0.5006385696040868, "acc_norm_stderr": 0.01787994891443168},
    "harness|ko_mmlu_anatomy|5": {"acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399},
    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415},
    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.34893617021276596, "acc_stderr": 0.031158522131357787, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357787},
    "harness|ko_mmlu_virology|5": {"acc": 0.3795180722891566, "acc_stderr": 0.037777988227480165, "acc_norm": 0.3795180722891566, "acc_norm_stderr": 0.037777988227480165},
    "harness|ko_mmlu_philosophy|5": {"acc": 0.5080385852090032, "acc_stderr": 0.02839442137098453, "acc_norm": 0.5080385852090032, "acc_norm_stderr": 0.02839442137098453},
    "harness|ko_mmlu_human_aging|5": {"acc": 0.47085201793721976, "acc_stderr": 0.03350073248773404, "acc_norm": 0.47085201793721976, "acc_norm_stderr": 0.03350073248773404},
    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858},
    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025},
    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.5454545454545454, "acc_stderr": 0.03547601494006938, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.03547601494006938},
    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151},
    "harness|ko_mmlu_college_physics|5": {"acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453},
    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.4789915966386555, "acc_stderr": 0.03244980849990028, "acc_norm": 0.4789915966386555, "acc_norm_stderr": 0.03244980849990028},
    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.4153846153846154, "acc_stderr": 0.024985354923102318, "acc_norm": 0.4153846153846154, "acc_norm_stderr": 0.024985354923102318},
    "harness|ko_mmlu_computer_security|5": {"acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084},
    "harness|ko_mmlu_global_facts|5": {"acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504},
    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.4351851851851852, "acc_stderr": 0.04792898170907062, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.04792898170907062},
    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.3694581280788177, "acc_stderr": 0.03395970381998576, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.03395970381998576},
    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.4483870967741935, "acc_stderr": 0.02829205683011273, "acc_norm": 0.4483870967741935, "acc_norm_stderr": 0.02829205683011273},
    "harness|ko_mmlu_marketing|5": {"acc": 0.6538461538461539, "acc_stderr": 0.0311669573672359, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.0311669573672359},
    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.4339622641509434, "acc_stderr": 0.030503292013342592, "acc_norm": 0.4339622641509434, "acc_norm_stderr": 0.030503292013342592},
    "harness|ko_mmlu_public_relations|5": {"acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061},
    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.3148148148148148, "acc_stderr": 0.028317533496066482, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066482},
    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.304635761589404, "acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426},
    "harness|ko_mmlu_sociology|5": {"acc": 0.5572139303482587, "acc_stderr": 0.03512310964123935, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.03512310964123935},
    "harness|ko_mmlu_college_medicine|5": {"acc": 0.4046242774566474, "acc_stderr": 0.03742461193887248, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.03742461193887248},
    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.35978835978835977, "acc_stderr": 0.024718075944129277, "acc_norm": 0.35978835978835977, "acc_norm_stderr": 0.024718075944129277},
    "harness|ko_mmlu_college_biology|5": {"acc": 0.3611111111111111, "acc_stderr": 0.04016660030451233, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.04016660030451233},
    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235},
    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975},
    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.44508670520231214, "acc_stderr": 0.026756255129663765, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.026756255129663765},
    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.4171779141104294, "acc_stderr": 0.038741028598180814, "acc_norm": 0.4171779141104294, "acc_norm_stderr": 0.038741028598180814},
    "harness|ko_mmlu_prehistory|5": {"acc": 0.46296296296296297, "acc_stderr": 0.027744313443376536, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.027744313443376536},
    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.47150259067357514, "acc_stderr": 0.036025735712884414, "acc_norm": 0.47150259067357514, "acc_norm_stderr": 0.036025735712884414},
    "harness|ko_mmlu_econometrics|5": {"acc": 0.20175438596491227, "acc_stderr": 0.03775205013583639, "acc_norm": 0.20175438596491227, "acc_norm_stderr": 0.03775205013583639},
    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.5174311926605505, "acc_stderr": 0.02142429187185315, "acc_norm": 0.5174311926605505, "acc_norm_stderr": 0.02142429187185315},
    "harness|ko_mmlu_formal_logic|5": {"acc": 0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177},
    "harness|ko_mmlu_nutrition|5": {"acc": 0.4150326797385621, "acc_stderr": 0.028213504177824093, "acc_norm": 0.4150326797385621, "acc_norm_stderr": 0.028213504177824093},
    "harness|ko_mmlu_business_ethics|5": {"acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996},
    "harness|ko_mmlu_international_law|5": {"acc": 0.5867768595041323, "acc_stderr": 0.04495087843548408, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.04495087843548408},
    "harness|ko_mmlu_astronomy|5": {"acc": 0.45394736842105265, "acc_stderr": 0.04051646342874142, "acc_norm": 0.45394736842105265, "acc_norm_stderr": 0.04051646342874142},
    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3611111111111111, "acc_stderr": 0.019431775677037313, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.019431775677037313},
    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3333333333333333,
|
287 |
-
"acc_stderr": 0.02812163604063989,
|
288 |
-
"acc_norm": 0.3333333333333333,
|
289 |
-
"acc_norm_stderr": 0.02812163604063989
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25,
|
293 |
-
"acc_stderr": 0.04109974682633932,
|
294 |
-
"acc_norm": 0.25,
|
295 |
-
"acc_norm_stderr": 0.04109974682633932
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3888888888888889,
|
299 |
-
"acc_stderr": 0.03324708911809117,
|
300 |
-
"acc_norm": 0.3888888888888889,
|
301 |
-
"acc_norm_stderr": 0.03324708911809117
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24916201117318434,
|
305 |
-
"acc_stderr": 0.01446589382985992,
|
306 |
-
"acc_norm": 0.24916201117318434,
|
307 |
-
"acc_norm_stderr": 0.01446589382985992
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.41,
|
311 |
-
"acc_stderr": 0.04943110704237102,
|
312 |
-
"acc_norm": 0.41,
|
313 |
-
"acc_norm_stderr": 0.04943110704237102
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.48,
|
317 |
-
"acc_stderr": 0.050211673156867795,
|
318 |
-
"acc_norm": 0.48,
|
319 |
-
"acc_norm_stderr": 0.050211673156867795
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.34191176470588236,
|
323 |
-
"acc_stderr": 0.02881472242225417,
|
324 |
-
"acc_norm": 0.34191176470588236,
|
325 |
-
"acc_norm_stderr": 0.02881472242225417
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.4775510204081633,
|
329 |
-
"acc_stderr": 0.03197694118713673,
|
330 |
-
"acc_norm": 0.4775510204081633,
|
331 |
-
"acc_norm_stderr": 0.03197694118713673
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5780590717299579,
|
335 |
-
"acc_stderr": 0.032148146302403695,
|
336 |
-
"acc_norm": 0.5780590717299579,
|
337 |
-
"acc_norm_stderr": 0.032148146302403695
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3624511082138201,
|
341 |
-
"acc_stderr": 0.012277512533252495,
|
342 |
-
"acc_norm": 0.3624511082138201,
|
343 |
-
"acc_norm_stderr": 0.012277512533252495
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.45098039215686275,
|
347 |
-
"acc_stderr": 0.03492406104163614,
|
348 |
-
"acc_norm": 0.45098039215686275,
|
349 |
-
"acc_norm_stderr": 0.03492406104163614
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4909090909090909,
|
353 |
-
"acc_stderr": 0.03903698647748441,
|
354 |
-
"acc_norm": 0.4909090909090909,
|
355 |
-
"acc_norm_stderr": 0.03903698647748441
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2864137086903305,
|
359 |
-
"mc1_stderr": 0.015826142439502342,
|
360 |
-
"mc2": 0.4364091486561351,
|
361 |
-
"mc2_stderr": 0.015369734802451228
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5176056338028169,
|
365 |
-
"acc_stderr": 0.017129150724246808,
|
366 |
-
"acc_norm": 0.5903755868544601,
|
367 |
-
"acc_norm_stderr": 0.016857467505356098
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "HumanF-MarkrAI/pub-llama-13b-v1",
|
436 |
-
"model_sha": "4aa21e41dfcb82ff842306b3b5eadd2b258bfc80",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.371160409556314,
|
5 |
-
"acc_stderr": 0.014117971901142824,
|
6 |
-
"acc_norm": 0.4197952218430034,
|
7 |
-
"acc_norm_stderr": 0.014422181226303026
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.4048994224258116,
|
11 |
-
"acc_stderr": 0.004898693652043317,
|
12 |
-
"acc_norm": 0.5401314479187412,
|
13 |
-
"acc_norm_stderr": 0.0049736830262021746
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.47953216374269003,
|
17 |
-
"acc_stderr": 0.0383161053282193,
|
18 |
-
"acc_norm": 0.47953216374269003,
|
19 |
-
"acc_norm_stderr": 0.0383161053282193
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.5631067961165048,
|
23 |
-
"acc_stderr": 0.049111471073657764,
|
24 |
-
"acc_norm": 0.5631067961165048,
|
25 |
-
"acc_norm_stderr": 0.049111471073657764
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.5019157088122606,
|
29 |
-
"acc_stderr": 0.017879832259026677,
|
30 |
-
"acc_norm": 0.5019157088122606,
|
31 |
-
"acc_norm_stderr": 0.017879832259026677
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.4444444444444444,
|
35 |
-
"acc_stderr": 0.042925967182569816,
|
36 |
-
"acc_norm": 0.4444444444444444,
|
37 |
-
"acc_norm_stderr": 0.042925967182569816
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.26,
|
41 |
-
"acc_stderr": 0.044084400227680794,
|
42 |
-
"acc_norm": 0.26,
|
43 |
-
"acc_norm_stderr": 0.044084400227680794
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.3276595744680851,
|
47 |
-
"acc_stderr": 0.030683020843231008,
|
48 |
-
"acc_norm": 0.3276595744680851,
|
49 |
-
"acc_norm_stderr": 0.030683020843231008
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.3493975903614458,
|
53 |
-
"acc_stderr": 0.0371172519074075,
|
54 |
-
"acc_norm": 0.3493975903614458,
|
55 |
-
"acc_norm_stderr": 0.0371172519074075
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.5048231511254019,
|
59 |
-
"acc_stderr": 0.028396770444111298,
|
60 |
-
"acc_norm": 0.5048231511254019,
|
61 |
-
"acc_norm_stderr": 0.028396770444111298
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.48878923766816146,
|
65 |
-
"acc_stderr": 0.033549366530984746,
|
66 |
-
"acc_norm": 0.48878923766816146,
|
67 |
-
"acc_norm_stderr": 0.033549366530984746
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.44274809160305345,
|
71 |
-
"acc_stderr": 0.0435644720266507,
|
72 |
-
"acc_norm": 0.44274809160305345,
|
73 |
-
"acc_norm_stderr": 0.0435644720266507
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.42,
|
77 |
-
"acc_stderr": 0.049604496374885836,
|
78 |
-
"acc_norm": 0.42,
|
79 |
-
"acc_norm_stderr": 0.049604496374885836
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.5353535353535354,
|
83 |
-
"acc_stderr": 0.03553436368828063,
|
84 |
-
"acc_norm": 0.5353535353535354,
|
85 |
-
"acc_norm_stderr": 0.03553436368828063
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.45517241379310347,
|
89 |
-
"acc_stderr": 0.04149886942192117,
|
90 |
-
"acc_norm": 0.45517241379310347,
|
91 |
-
"acc_norm_stderr": 0.04149886942192117
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.24509803921568626,
|
95 |
-
"acc_stderr": 0.04280105837364395,
|
96 |
-
"acc_norm": 0.24509803921568626,
|
97 |
-
"acc_norm_stderr": 0.04280105837364395
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.4495798319327731,
|
101 |
-
"acc_stderr": 0.03231293497137707,
|
102 |
-
"acc_norm": 0.4495798319327731,
|
103 |
-
"acc_norm_stderr": 0.03231293497137707
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.4256410256410256,
|
107 |
-
"acc_stderr": 0.02506909438729654,
|
108 |
-
"acc_norm": 0.4256410256410256,
|
109 |
-
"acc_norm_stderr": 0.02506909438729654
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.41,
|
113 |
-
"acc_stderr": 0.04943110704237102,
|
114 |
-
"acc_norm": 0.41,
|
115 |
-
"acc_norm_stderr": 0.04943110704237102
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.27,
|
119 |
-
"acc_stderr": 0.044619604333847394,
|
120 |
-
"acc_norm": 0.27,
|
121 |
-
"acc_norm_stderr": 0.044619604333847394
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.4537037037037037,
|
125 |
-
"acc_stderr": 0.04812917324536821,
|
126 |
-
"acc_norm": 0.4537037037037037,
|
127 |
-
"acc_norm_stderr": 0.04812917324536821
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.3793103448275862,
|
131 |
-
"acc_stderr": 0.03413963805906234,
|
132 |
-
"acc_norm": 0.3793103448275862,
|
133 |
-
"acc_norm_stderr": 0.03413963805906234
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.43548387096774194,
|
137 |
-
"acc_stderr": 0.02820622559150274,
|
138 |
-
"acc_norm": 0.43548387096774194,
|
139 |
-
"acc_norm_stderr": 0.02820622559150274
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.6452991452991453,
|
143 |
-
"acc_stderr": 0.03134250486245402,
|
144 |
-
"acc_norm": 0.6452991452991453,
|
145 |
-
"acc_norm_stderr": 0.03134250486245402
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.4339622641509434,
|
149 |
-
"acc_stderr": 0.030503292013342592,
|
150 |
-
"acc_norm": 0.4339622641509434,
|
151 |
-
"acc_norm_stderr": 0.030503292013342592
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.5272727272727272,
|
155 |
-
"acc_stderr": 0.04782001791380061,
|
156 |
-
"acc_norm": 0.5272727272727272,
|
157 |
-
"acc_norm_stderr": 0.04782001791380061
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.31851851851851853,
|
161 |
-
"acc_stderr": 0.02840653309060846,
|
162 |
-
"acc_norm": 0.31851851851851853,
|
163 |
-
"acc_norm_stderr": 0.02840653309060846
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.31125827814569534,
|
167 |
-
"acc_stderr": 0.03780445850526733,
|
168 |
-
"acc_norm": 0.31125827814569534,
|
169 |
-
"acc_norm_stderr": 0.03780445850526733
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.5522388059701493,
|
173 |
-
"acc_stderr": 0.035161847729521675,
|
174 |
-
"acc_norm": 0.5522388059701493,
|
175 |
-
"acc_norm_stderr": 0.035161847729521675
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.3930635838150289,
|
179 |
-
"acc_stderr": 0.0372424959581773,
|
180 |
-
"acc_norm": 0.3930635838150289,
|
181 |
-
"acc_norm_stderr": 0.0372424959581773
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.36772486772486773,
|
185 |
-
"acc_stderr": 0.024833839825562424,
|
186 |
-
"acc_norm": 0.36772486772486773,
|
187 |
-
"acc_norm_stderr": 0.024833839825562424
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3680555555555556,
|
191 |
-
"acc_stderr": 0.040329990539607195,
|
192 |
-
"acc_norm": 0.3680555555555556,
|
193 |
-
"acc_norm_stderr": 0.040329990539607195
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.32,
|
197 |
-
"acc_stderr": 0.04688261722621504,
|
198 |
-
"acc_norm": 0.32,
|
199 |
-
"acc_norm_stderr": 0.04688261722621504
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.65,
|
203 |
-
"acc_stderr": 0.047937248544110196,
|
204 |
-
"acc_norm": 0.65,
|
205 |
-
"acc_norm_stderr": 0.047937248544110196
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.4479768786127168,
|
209 |
-
"acc_stderr": 0.02677299065336182,
|
210 |
-
"acc_norm": 0.4479768786127168,
|
211 |
-
"acc_norm_stderr": 0.02677299065336182
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.44785276073619634,
|
215 |
-
"acc_stderr": 0.03906947479456601,
|
216 |
-
"acc_norm": 0.44785276073619634,
|
217 |
-
"acc_norm_stderr": 0.03906947479456601
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.47530864197530864,
|
221 |
-
"acc_stderr": 0.02778680093142745,
|
222 |
-
"acc_norm": 0.47530864197530864,
|
223 |
-
"acc_norm_stderr": 0.02778680093142745
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.31,
|
227 |
-
"acc_stderr": 0.04648231987117316,
|
228 |
-
"acc_norm": 0.31,
|
229 |
-
"acc_norm_stderr": 0.04648231987117316
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.48186528497409326,
|
233 |
-
"acc_stderr": 0.036060650018329185,
|
234 |
-
"acc_norm": 0.48186528497409326,
|
235 |
-
"acc_norm_stderr": 0.036060650018329185
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.22807017543859648,
|
239 |
-
"acc_stderr": 0.03947152782669415,
|
240 |
-
"acc_norm": 0.22807017543859648,
|
241 |
-
"acc_norm_stderr": 0.03947152782669415
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.5376146788990825,
|
245 |
-
"acc_stderr": 0.021376575274397576,
|
246 |
-
"acc_norm": 0.5376146788990825,
|
247 |
-
"acc_norm_stderr": 0.021376575274397576
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.40476190476190477,
|
251 |
-
"acc_stderr": 0.043902592653775614,
|
252 |
-
"acc_norm": 0.40476190476190477,
|
253 |
-
"acc_norm_stderr": 0.043902592653775614
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.4215686274509804,
|
257 |
-
"acc_stderr": 0.028275490156791434,
|
258 |
-
"acc_norm": 0.4215686274509804,
|
259 |
-
"acc_norm_stderr": 0.028275490156791434
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.47,
|
263 |
-
"acc_stderr": 0.05016135580465919,
|
264 |
-
"acc_norm": 0.47,
|
265 |
-
"acc_norm_stderr": 0.05016135580465919
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5950413223140496,
|
269 |
-
"acc_stderr": 0.04481137755942469,
|
270 |
-
"acc_norm": 0.5950413223140496,
|
271 |
-
"acc_norm_stderr": 0.04481137755942469
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.4473684210526316,
|
275 |
-
"acc_stderr": 0.04046336883978251,
|
276 |
-
"acc_norm": 0.4473684210526316,
|
277 |
-
"acc_norm_stderr": 0.04046336883978251
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.36764705882352944,
|
281 |
-
"acc_stderr": 0.019506291693954854,
|
282 |
-
"acc_norm": 0.36764705882352944,
|
283 |
-
"acc_norm_stderr": 0.019506291693954854
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.34397163120567376,
|
287 |
-
"acc_stderr": 0.02833801742861132,
|
288 |
-
"acc_norm": 0.34397163120567376,
|
289 |
-
"acc_norm_stderr": 0.02833801742861132
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25892857142857145,
|
293 |
-
"acc_stderr": 0.041577515398656284,
|
294 |
-
"acc_norm": 0.25892857142857145,
|
295 |
-
"acc_norm_stderr": 0.041577515398656284
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3888888888888889,
|
299 |
-
"acc_stderr": 0.03324708911809117,
|
300 |
-
"acc_norm": 0.3888888888888889,
|
301 |
-
"acc_norm_stderr": 0.03324708911809117
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24804469273743016,
|
305 |
-
"acc_stderr": 0.014444157808261453,
|
306 |
-
"acc_norm": 0.24804469273743016,
|
307 |
-
"acc_norm_stderr": 0.014444157808261453
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.43,
|
311 |
-
"acc_stderr": 0.04975698519562428,
|
312 |
-
"acc_norm": 0.43,
|
313 |
-
"acc_norm_stderr": 0.04975698519562428
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.48,
|
317 |
-
"acc_stderr": 0.050211673156867795,
|
318 |
-
"acc_norm": 0.48,
|
319 |
-
"acc_norm_stderr": 0.050211673156867795
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.34558823529411764,
|
323 |
-
"acc_stderr": 0.02888819310398865,
|
324 |
-
"acc_norm": 0.34558823529411764,
|
325 |
-
"acc_norm_stderr": 0.02888819310398865
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.46530612244897956,
|
329 |
-
"acc_stderr": 0.03193207024425314,
|
330 |
-
"acc_norm": 0.46530612244897956,
|
331 |
-
"acc_norm_stderr": 0.03193207024425314
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.569620253164557,
|
335 |
-
"acc_stderr": 0.03223017195937599,
|
336 |
-
"acc_norm": 0.569620253164557,
|
337 |
-
"acc_norm_stderr": 0.03223017195937599
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.35723598435462844,
|
341 |
-
"acc_stderr": 0.012238615750316506,
|
342 |
-
"acc_norm": 0.35723598435462844,
|
343 |
-
"acc_norm_stderr": 0.012238615750316506
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.45098039215686275,
|
347 |
-
"acc_stderr": 0.03492406104163614,
|
348 |
-
"acc_norm": 0.45098039215686275,
|
349 |
-
"acc_norm_stderr": 0.03492406104163614
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4909090909090909,
|
353 |
-
"acc_stderr": 0.03903698647748441,
|
354 |
-
"acc_norm": 0.4909090909090909,
|
355 |
-
"acc_norm_stderr": 0.03903698647748441
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2802937576499388,
|
359 |
-
"mc1_stderr": 0.015723139524608742,
|
360 |
-
"mc2": 0.43609767583849846,
|
361 |
-
"mc2_stderr": 0.015308496603243212
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.5751173708920188,
|
365 |
-
"acc_stderr": 0.0169452488268217,
|
366 |
-
"acc_norm": 0.647887323943662,
|
367 |
-
"acc_norm_stderr": 0.016372906865326657
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "HumanF-MarkrAI/pub-llama-13b-v2",
|
436 |
-
"model_sha": "d59387039c395781b62f514db7bf4fb32d254522",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
HumanF-MarkrAI/pub-llama-7b-v1/result_2023-10-19 00:06:32.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.28071672354948807,
|
5 |
-
"acc_stderr": 0.013131238126975593,
|
6 |
-
"acc_norm": 0.34812286689419797,
|
7 |
-
"acc_norm_stderr": 0.013921008595179338
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.36496713802031466,
|
11 |
-
"acc_stderr": 0.0048043705638562305,
|
12 |
-
"acc_norm": 0.48665604461262696,
|
13 |
-
"acc_norm_stderr": 0.004988004122536492
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.39766081871345027,
|
17 |
-
"acc_stderr": 0.0375363895576169,
|
18 |
-
"acc_norm": 0.39766081871345027,
|
19 |
-
"acc_norm_stderr": 0.0375363895576169
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.3106796116504854,
|
23 |
-
"acc_stderr": 0.04582124160161551,
|
24 |
-
"acc_norm": 0.3106796116504854,
|
25 |
-
"acc_norm_stderr": 0.04582124160161551
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.3716475095785441,
|
29 |
-
"acc_stderr": 0.01728080252213318,
|
30 |
-
"acc_norm": 0.3716475095785441,
|
31 |
-
"acc_norm_stderr": 0.01728080252213318
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.362962962962963,
|
35 |
-
"acc_stderr": 0.041539484047424004,
|
36 |
-
"acc_norm": 0.362962962962963,
|
37 |
-
"acc_norm_stderr": 0.041539484047424004
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.28,
|
41 |
-
"acc_stderr": 0.04512608598542127,
|
42 |
-
"acc_norm": 0.28,
|
43 |
-
"acc_norm_stderr": 0.04512608598542127
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.28085106382978725,
|
47 |
-
"acc_stderr": 0.02937917046412482,
|
48 |
-
"acc_norm": 0.28085106382978725,
|
49 |
-
"acc_norm_stderr": 0.02937917046412482
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.3192771084337349,
|
53 |
-
"acc_stderr": 0.03629335329947861,
|
54 |
-
"acc_norm": 0.3192771084337349,
|
55 |
-
"acc_norm_stderr": 0.03629335329947861
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.3890675241157556,
|
59 |
-
"acc_stderr": 0.027690337536485372,
|
60 |
-
"acc_norm": 0.3890675241157556,
|
61 |
-
"acc_norm_stderr": 0.027690337536485372
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.37668161434977576,
|
65 |
-
"acc_stderr": 0.032521134899291884,
|
66 |
-
"acc_norm": 0.37668161434977576,
|
67 |
-
"acc_norm_stderr": 0.032521134899291884
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.45038167938931295,
|
71 |
-
"acc_stderr": 0.04363643698524779,
|
72 |
-
"acc_norm": 0.45038167938931295,
|
73 |
-
"acc_norm_stderr": 0.04363643698524779
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.34,
|
77 |
-
"acc_stderr": 0.04760952285695236,
|
78 |
-
"acc_norm": 0.34,
|
79 |
-
"acc_norm_stderr": 0.04760952285695236
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.398989898989899,
|
83 |
-
"acc_stderr": 0.03488901616852731,
|
84 |
-
"acc_norm": 0.398989898989899,
|
85 |
-
"acc_norm_stderr": 0.03488901616852731
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.38620689655172413,
|
89 |
-
"acc_stderr": 0.04057324734419035,
|
90 |
-
"acc_norm": 0.38620689655172413,
|
91 |
-
"acc_norm_stderr": 0.04057324734419035
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.23529411764705882,
|
95 |
-
"acc_stderr": 0.04220773659171453,
|
96 |
-
"acc_norm": 0.23529411764705882,
|
97 |
-
"acc_norm_stderr": 0.04220773659171453
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3739495798319328,
|
101 |
-
"acc_stderr": 0.031429466378837076,
|
102 |
-
"acc_norm": 0.3739495798319328,
|
103 |
-
"acc_norm_stderr": 0.031429466378837076
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.28717948717948716,
|
107 |
-
"acc_stderr": 0.022939925418530616,
|
108 |
-
"acc_norm": 0.28717948717948716,
|
109 |
-
"acc_norm_stderr": 0.022939925418530616
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.4,
|
113 |
-
"acc_stderr": 0.049236596391733084,
|
114 |
-
"acc_norm": 0.4,
|
115 |
-
"acc_norm_stderr": 0.049236596391733084
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.21,
|
119 |
-
"acc_stderr": 0.04093601807403326,
|
120 |
-
"acc_norm": 0.21,
|
121 |
-
"acc_norm_stderr": 0.04093601807403326
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.42592592592592593,
|
125 |
-
"acc_stderr": 0.0478034362693679,
|
126 |
-
"acc_norm": 0.42592592592592593,
|
127 |
-
"acc_norm_stderr": 0.0478034362693679
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.23645320197044334,
|
131 |
-
"acc_stderr": 0.029896114291733555,
|
132 |
-
"acc_norm": 0.23645320197044334,
|
133 |
-
"acc_norm_stderr": 0.029896114291733555
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3774193548387097,
|
137 |
-
"acc_stderr": 0.027575960723278253,
|
138 |
-
"acc_norm": 0.3774193548387097,
|
139 |
-
"acc_norm_stderr": 0.027575960723278253
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.49145299145299143,
|
143 |
-
"acc_stderr": 0.032751303000970296,
|
144 |
-
"acc_norm": 0.49145299145299143,
|
145 |
-
"acc_norm_stderr": 0.032751303000970296
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.32452830188679244,
|
149 |
-
"acc_stderr": 0.028815615713432115,
|
150 |
-
"acc_norm": 0.32452830188679244,
|
151 |
-
"acc_norm_stderr": 0.028815615713432115
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.4,
|
155 |
-
"acc_stderr": 0.0469237132203465,
|
156 |
-
"acc_norm": 0.4,
|
157 |
-
"acc_norm_stderr": 0.0469237132203465
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.22962962962962963,
|
161 |
-
"acc_stderr": 0.02564410863926762,
|
162 |
-
"acc_norm": 0.22962962962962963,
|
163 |
-
"acc_norm_stderr": 0.02564410863926762
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.304635761589404,
|
167 |
-
"acc_stderr": 0.03757949922943342,
|
168 |
-
"acc_norm": 0.304635761589404,
|
169 |
-
"acc_norm_stderr": 0.03757949922943342
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.4527363184079602,
|
173 |
-
"acc_stderr": 0.035197027175769155,
|
174 |
-
"acc_norm": 0.4527363184079602,
|
175 |
-
"acc_norm_stderr": 0.035197027175769155
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.30057803468208094,
|
179 |
-
"acc_stderr": 0.03496101481191181,
|
180 |
-
"acc_norm": 0.30057803468208094,
|
181 |
-
"acc_norm_stderr": 0.03496101481191181
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.24603174603174602,
|
185 |
-
"acc_stderr": 0.022182037202948368,
|
186 |
-
"acc_norm": 0.24603174603174602,
|
187 |
-
"acc_norm_stderr": 0.022182037202948368
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3263888888888889,
|
191 |
-
"acc_stderr": 0.03921067198982266,
|
192 |
-
"acc_norm": 0.3263888888888889,
|
193 |
-
"acc_norm_stderr": 0.03921067198982266
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.22,
|
197 |
-
"acc_stderr": 0.04163331998932268,
|
198 |
-
"acc_norm": 0.22,
|
199 |
-
"acc_norm_stderr": 0.04163331998932268
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.41,
|
203 |
-
"acc_stderr": 0.04943110704237103,
|
204 |
-
"acc_norm": 0.41,
|
205 |
-
"acc_norm_stderr": 0.04943110704237103
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.3699421965317919,
|
209 |
-
"acc_stderr": 0.025992472029306386,
|
210 |
-
"acc_norm": 0.3699421965317919,
|
211 |
-
"acc_norm_stderr": 0.025992472029306386
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3128834355828221,
|
215 |
-
"acc_stderr": 0.036429145782924055,
|
216 |
-
"acc_norm": 0.3128834355828221,
|
217 |
-
"acc_norm_stderr": 0.036429145782924055
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.38580246913580246,
|
221 |
-
"acc_stderr": 0.027085401226132143,
|
222 |
-
"acc_norm": 0.38580246913580246,
|
223 |
-
"acc_norm_stderr": 0.027085401226132143
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.34,
|
227 |
-
"acc_stderr": 0.04760952285695234,
|
228 |
-
"acc_norm": 0.34,
|
229 |
-
"acc_norm_stderr": 0.04760952285695234
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.40414507772020725,
|
233 |
-
"acc_stderr": 0.0354150857888402,
|
234 |
-
"acc_norm": 0.40414507772020725,
|
235 |
-
"acc_norm_stderr": 0.0354150857888402
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2719298245614035,
|
239 |
-
"acc_stderr": 0.04185774424022057,
|
240 |
-
"acc_norm": 0.2719298245614035,
|
241 |
-
"acc_norm_stderr": 0.04185774424022057
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3926605504587156,
|
245 |
-
"acc_stderr": 0.020937505161201093,
|
246 |
-
"acc_norm": 0.3926605504587156,
|
247 |
-
"acc_norm_stderr": 0.020937505161201093
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.2698412698412698,
|
251 |
-
"acc_stderr": 0.03970158273235173,
|
252 |
-
"acc_norm": 0.2698412698412698,
|
253 |
-
"acc_norm_stderr": 0.03970158273235173
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.43137254901960786,
|
257 |
-
"acc_stderr": 0.028358956313423556,
|
258 |
-
"acc_norm": 0.43137254901960786,
|
259 |
-
"acc_norm_stderr": 0.028358956313423556
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.36,
|
263 |
-
"acc_stderr": 0.04824181513244218,
|
264 |
-
"acc_norm": 0.36,
|
265 |
-
"acc_norm_stderr": 0.04824181513244218
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.49586776859504134,
|
269 |
-
"acc_stderr": 0.04564198767432754,
|
270 |
-
"acc_norm": 0.49586776859504134,
|
271 |
-
"acc_norm_stderr": 0.04564198767432754
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.27631578947368424,
|
275 |
-
"acc_stderr": 0.03639057569952925,
|
276 |
-
"acc_norm": 0.27631578947368424,
|
277 |
-
"acc_norm_stderr": 0.03639057569952925
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3235294117647059,
|
281 |
-
"acc_stderr": 0.01892608291608339,
|
282 |
-
"acc_norm": 0.3235294117647059,
|
283 |
-
"acc_norm_stderr": 0.01892608291608339
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.2978723404255319,
|
287 |
-
"acc_stderr": 0.027281608344469414,
|
288 |
-
"acc_norm": 0.2978723404255319,
|
289 |
-
"acc_norm_stderr": 0.027281608344469414
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.25892857142857145,
|
293 |
-
"acc_stderr": 0.041577515398656284,
|
294 |
-
"acc_norm": 0.25892857142857145,
|
295 |
-
"acc_norm_stderr": 0.041577515398656284
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.32407407407407407,
|
299 |
-
"acc_stderr": 0.03191923445686186,
|
300 |
-
"acc_norm": 0.32407407407407407,
|
301 |
-
"acc_norm_stderr": 0.03191923445686186
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.25251396648044694,
|
305 |
-
"acc_stderr": 0.014530330201468636,
|
306 |
-
"acc_norm": 0.25251396648044694,
|
307 |
-
"acc_norm_stderr": 0.014530330201468636
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.27,
|
311 |
-
"acc_stderr": 0.044619604333847394,
|
312 |
-
"acc_norm": 0.27,
|
313 |
-
"acc_norm_stderr": 0.044619604333847394
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.29,
|
317 |
-
"acc_stderr": 0.045604802157206845,
|
318 |
-
"acc_norm": 0.29,
|
319 |
-
"acc_norm_stderr": 0.045604802157206845
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.41911764705882354,
|
323 |
-
"acc_stderr": 0.02997280717046463,
|
324 |
-
"acc_norm": 0.41911764705882354,
|
325 |
-
"acc_norm_stderr": 0.02997280717046463
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.3510204081632653,
|
329 |
-
"acc_stderr": 0.03055531675557364,
|
330 |
-
"acc_norm": 0.3510204081632653,
|
331 |
-
"acc_norm_stderr": 0.03055531675557364
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.4810126582278481,
|
335 |
-
"acc_stderr": 0.03252375148090448,
|
336 |
-
"acc_norm": 0.4810126582278481,
|
337 |
-
"acc_norm_stderr": 0.03252375148090448
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.31486310299869624,
|
341 |
-
"acc_stderr": 0.011862561755715928,
|
342 |
-
"acc_norm": 0.31486310299869624,
|
343 |
-
"acc_norm_stderr": 0.011862561755715928
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.3872549019607843,
|
347 |
-
"acc_stderr": 0.03418931233833344,
|
348 |
-
"acc_norm": 0.3872549019607843,
|
349 |
-
"acc_norm_stderr": 0.03418931233833344
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.37575757575757573,
|
353 |
-
"acc_stderr": 0.03781887353205982,
|
354 |
-
"acc_norm": 0.37575757575757573,
|
355 |
-
"acc_norm_stderr": 0.03781887353205982
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24112607099143207,
|
359 |
-
"mc1_stderr": 0.014974827279752332,
|
360 |
-
"mc2": 0.38399188144082486,
|
361 |
-
"mc2_stderr": 0.015164475722750202
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.2981220657276995,
|
365 |
-
"acc_stderr": 0.01568061440819548,
|
366 |
-
"acc_norm": 0.3720657276995305,
|
367 |
-
"acc_norm_stderr": 0.016569223163823546
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "HumanF-MarkrAI/pub-llama-7b-v1",
|
436 |
-
"model_sha": "41de7ce06931ccfe1ed99435bb071d69aca2ffe0",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.30204778156996587,
|
5 |
-
"acc_stderr": 0.013417519144716417,
|
6 |
-
"acc_norm": 0.3174061433447099,
|
7 |
-
"acc_norm_stderr": 0.01360223908803817
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3450507866958773,
|
11 |
-
"acc_stderr": 0.004744132825391515,
|
12 |
-
"acc_norm": 0.41196972714598684,
|
13 |
-
"acc_norm_stderr": 0.00491183773058221
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.4619883040935672,
|
17 |
-
"acc_stderr": 0.03823727092882307,
|
18 |
-
"acc_norm": 0.4619883040935672,
|
19 |
-
"acc_norm_stderr": 0.03823727092882307
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.3592233009708738,
|
23 |
-
"acc_stderr": 0.04750458399041692,
|
24 |
-
"acc_norm": 0.3592233009708738,
|
25 |
-
"acc_norm_stderr": 0.04750458399041692
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.39719029374201786,
|
29 |
-
"acc_stderr": 0.017497905037159377,
|
30 |
-
"acc_norm": 0.39719029374201786,
|
31 |
-
"acc_norm_stderr": 0.017497905037159377
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.31851851851851853,
|
35 |
-
"acc_stderr": 0.040247784019771096,
|
36 |
-
"acc_norm": 0.31851851851851853,
|
37 |
-
"acc_norm_stderr": 0.040247784019771096
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.33,
|
41 |
-
"acc_stderr": 0.047258156262526045,
|
42 |
-
"acc_norm": 0.33,
|
43 |
-
"acc_norm_stderr": 0.047258156262526045
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.3404255319148936,
|
47 |
-
"acc_stderr": 0.030976692998534422,
|
48 |
-
"acc_norm": 0.3404255319148936,
|
49 |
-
"acc_norm_stderr": 0.030976692998534422
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.2891566265060241,
|
53 |
-
"acc_stderr": 0.03529486801511114,
|
54 |
-
"acc_norm": 0.2891566265060241,
|
55 |
-
"acc_norm_stderr": 0.03529486801511114
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.37942122186495175,
|
59 |
-
"acc_stderr": 0.027559949802347817,
|
60 |
-
"acc_norm": 0.37942122186495175,
|
61 |
-
"acc_norm_stderr": 0.027559949802347817
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3901345291479821,
|
65 |
-
"acc_stderr": 0.03273766725459156,
|
66 |
-
"acc_norm": 0.3901345291479821,
|
67 |
-
"acc_norm_stderr": 0.03273766725459156
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.32061068702290074,
|
71 |
-
"acc_stderr": 0.040933292298342784,
|
72 |
-
"acc_norm": 0.32061068702290074,
|
73 |
-
"acc_norm_stderr": 0.040933292298342784
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.34,
|
77 |
-
"acc_stderr": 0.04760952285695235,
|
78 |
-
"acc_norm": 0.34,
|
79 |
-
"acc_norm_stderr": 0.04760952285695235
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.3282828282828283,
|
83 |
-
"acc_stderr": 0.03345678422756777,
|
84 |
-
"acc_norm": 0.3282828282828283,
|
85 |
-
"acc_norm_stderr": 0.03345678422756777
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.38620689655172413,
|
89 |
-
"acc_stderr": 0.04057324734419034,
|
90 |
-
"acc_norm": 0.38620689655172413,
|
91 |
-
"acc_norm_stderr": 0.04057324734419034
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.19607843137254902,
|
95 |
-
"acc_stderr": 0.03950581861179964,
|
96 |
-
"acc_norm": 0.19607843137254902,
|
97 |
-
"acc_norm_stderr": 0.03950581861179964
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.3487394957983193,
|
101 |
-
"acc_stderr": 0.030956636328566545,
|
102 |
-
"acc_norm": 0.3487394957983193,
|
103 |
-
"acc_norm_stderr": 0.030956636328566545
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.3384615384615385,
|
107 |
-
"acc_stderr": 0.023991500500313036,
|
108 |
-
"acc_norm": 0.3384615384615385,
|
109 |
-
"acc_norm_stderr": 0.023991500500313036
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.37,
|
113 |
-
"acc_stderr": 0.04852365870939099,
|
114 |
-
"acc_norm": 0.37,
|
115 |
-
"acc_norm_stderr": 0.04852365870939099
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.26,
|
119 |
-
"acc_stderr": 0.04408440022768077,
|
120 |
-
"acc_norm": 0.26,
|
121 |
-
"acc_norm_stderr": 0.04408440022768077
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.35185185185185186,
|
125 |
-
"acc_stderr": 0.04616631111801713,
|
126 |
-
"acc_norm": 0.35185185185185186,
|
127 |
-
"acc_norm_stderr": 0.04616631111801713
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.31527093596059114,
|
131 |
-
"acc_stderr": 0.03269080871970186,
|
132 |
-
"acc_norm": 0.31527093596059114,
|
133 |
-
"acc_norm_stderr": 0.03269080871970186
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3419354838709677,
|
137 |
-
"acc_stderr": 0.026985289576552732,
|
138 |
-
"acc_norm": 0.3419354838709677,
|
139 |
-
"acc_norm_stderr": 0.026985289576552732
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.5341880341880342,
|
143 |
-
"acc_stderr": 0.03267942734081228,
|
144 |
-
"acc_norm": 0.5341880341880342,
|
145 |
-
"acc_norm_stderr": 0.03267942734081228
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.33962264150943394,
|
149 |
-
"acc_stderr": 0.029146904747798342,
|
150 |
-
"acc_norm": 0.33962264150943394,
|
151 |
-
"acc_norm_stderr": 0.029146904747798342
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.4,
|
155 |
-
"acc_stderr": 0.0469237132203465,
|
156 |
-
"acc_norm": 0.4,
|
157 |
-
"acc_norm_stderr": 0.0469237132203465
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.3296296296296296,
|
161 |
-
"acc_stderr": 0.02866120111652457,
|
162 |
-
"acc_norm": 0.3296296296296296,
|
163 |
-
"acc_norm_stderr": 0.02866120111652457
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2980132450331126,
|
167 |
-
"acc_stderr": 0.037345356767871984,
|
168 |
-
"acc_norm": 0.2980132450331126,
|
169 |
-
"acc_norm_stderr": 0.037345356767871984
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.4129353233830846,
|
173 |
-
"acc_stderr": 0.03481520803367348,
|
174 |
-
"acc_norm": 0.4129353233830846,
|
175 |
-
"acc_norm_stderr": 0.03481520803367348
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.2947976878612717,
|
179 |
-
"acc_stderr": 0.03476599607516478,
|
180 |
-
"acc_norm": 0.2947976878612717,
|
181 |
-
"acc_norm_stderr": 0.03476599607516478
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.30423280423280424,
|
185 |
-
"acc_stderr": 0.023695415009463087,
|
186 |
-
"acc_norm": 0.30423280423280424,
|
187 |
-
"acc_norm_stderr": 0.023695415009463087
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2708333333333333,
|
191 |
-
"acc_stderr": 0.037161774375660164,
|
192 |
-
"acc_norm": 0.2708333333333333,
|
193 |
-
"acc_norm_stderr": 0.037161774375660164
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.32,
|
197 |
-
"acc_stderr": 0.046882617226215034,
|
198 |
-
"acc_norm": 0.32,
|
199 |
-
"acc_norm_stderr": 0.046882617226215034
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.46,
|
203 |
-
"acc_stderr": 0.05009082659620332,
|
204 |
-
"acc_norm": 0.46,
|
205 |
-
"acc_norm_stderr": 0.05009082659620332
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.3930635838150289,
|
209 |
-
"acc_stderr": 0.026296227915613663,
|
210 |
-
"acc_norm": 0.3930635838150289,
|
211 |
-
"acc_norm_stderr": 0.026296227915613663
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3619631901840491,
|
215 |
-
"acc_stderr": 0.037757007291414416,
|
216 |
-
"acc_norm": 0.3619631901840491,
|
217 |
-
"acc_norm_stderr": 0.037757007291414416
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.3487654320987654,
|
221 |
-
"acc_stderr": 0.02651759772446501,
|
222 |
-
"acc_norm": 0.3487654320987654,
|
223 |
-
"acc_norm_stderr": 0.02651759772446501
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.33,
|
227 |
-
"acc_stderr": 0.04725815626252605,
|
228 |
-
"acc_norm": 0.33,
|
229 |
-
"acc_norm_stderr": 0.04725815626252605
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.40932642487046633,
|
233 |
-
"acc_stderr": 0.03548608168860806,
|
234 |
-
"acc_norm": 0.40932642487046633,
|
235 |
-
"acc_norm_stderr": 0.03548608168860806
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.19298245614035087,
|
239 |
-
"acc_stderr": 0.037124548537213684,
|
240 |
-
"acc_norm": 0.19298245614035087,
|
241 |
-
"acc_norm_stderr": 0.037124548537213684
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3522935779816514,
|
245 |
-
"acc_stderr": 0.020480568843999004,
|
246 |
-
"acc_norm": 0.3522935779816514,
|
247 |
-
"acc_norm_stderr": 0.020480568843999004
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.2857142857142857,
|
251 |
-
"acc_stderr": 0.0404061017820884,
|
252 |
-
"acc_norm": 0.2857142857142857,
|
253 |
-
"acc_norm_stderr": 0.0404061017820884
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.39215686274509803,
|
257 |
-
"acc_stderr": 0.027956046165424516,
|
258 |
-
"acc_norm": 0.39215686274509803,
|
259 |
-
"acc_norm_stderr": 0.027956046165424516
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.43,
|
263 |
-
"acc_stderr": 0.049756985195624284,
|
264 |
-
"acc_norm": 0.43,
|
265 |
-
"acc_norm_stderr": 0.049756985195624284
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5206611570247934,
|
269 |
-
"acc_stderr": 0.045604560863872365,
|
270 |
-
"acc_norm": 0.5206611570247934,
|
271 |
-
"acc_norm_stderr": 0.045604560863872365
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.3881578947368421,
|
275 |
-
"acc_stderr": 0.03965842097512744,
|
276 |
-
"acc_norm": 0.3881578947368421,
|
277 |
-
"acc_norm_stderr": 0.03965842097512744
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.32189542483660133,
|
281 |
-
"acc_stderr": 0.01890101532209309,
|
282 |
-
"acc_norm": 0.32189542483660133,
|
283 |
-
"acc_norm_stderr": 0.01890101532209309
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.30851063829787234,
|
287 |
-
"acc_stderr": 0.027553366165101373,
|
288 |
-
"acc_norm": 0.30851063829787234,
|
289 |
-
"acc_norm_stderr": 0.027553366165101373
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.24107142857142858,
|
293 |
-
"acc_stderr": 0.04059867246952688,
|
294 |
-
"acc_norm": 0.24107142857142858,
|
295 |
-
"acc_norm_stderr": 0.04059867246952688
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3472222222222222,
|
299 |
-
"acc_stderr": 0.032468872436376486,
|
300 |
-
"acc_norm": 0.3472222222222222,
|
301 |
-
"acc_norm_stderr": 0.032468872436376486
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.23798882681564246,
|
305 |
-
"acc_stderr": 0.01424263007057489,
|
306 |
-
"acc_norm": 0.23798882681564246,
|
307 |
-
"acc_norm_stderr": 0.01424263007057489
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.32,
|
311 |
-
"acc_stderr": 0.046882617226215034,
|
312 |
-
"acc_norm": 0.32,
|
313 |
-
"acc_norm_stderr": 0.046882617226215034
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.38,
|
317 |
-
"acc_stderr": 0.04878317312145632,
|
318 |
-
"acc_norm": 0.38,
|
319 |
-
"acc_norm_stderr": 0.04878317312145632
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.26838235294117646,
|
323 |
-
"acc_stderr": 0.026917481224377246,
|
324 |
-
"acc_norm": 0.26838235294117646,
|
325 |
-
"acc_norm_stderr": 0.026917481224377246
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.31020408163265306,
|
329 |
-
"acc_stderr": 0.029613459872484375,
|
330 |
-
"acc_norm": 0.31020408163265306,
|
331 |
-
"acc_norm_stderr": 0.029613459872484375
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.5274261603375527,
|
335 |
-
"acc_stderr": 0.03249822718301303,
|
336 |
-
"acc_norm": 0.5274261603375527,
|
337 |
-
"acc_norm_stderr": 0.03249822718301303
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.303129074315515,
|
341 |
-
"acc_stderr": 0.0117386699512543,
|
342 |
-
"acc_norm": 0.303129074315515,
|
343 |
-
"acc_norm_stderr": 0.0117386699512543
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.38235294117647056,
|
347 |
-
"acc_stderr": 0.03410785338904719,
|
348 |
-
"acc_norm": 0.38235294117647056,
|
349 |
-
"acc_norm_stderr": 0.03410785338904719
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3878787878787879,
|
353 |
-
"acc_stderr": 0.038049136539710114,
|
354 |
-
"acc_norm": 0.3878787878787879,
|
355 |
-
"acc_norm_stderr": 0.038049136539710114
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2729498164014688,
|
359 |
-
"mc1_stderr": 0.015594753632006509,
|
360 |
-
"mc2": 0.4249328187172098,
|
361 |
-
"mc2_stderr": 0.016337088601279814
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.4014084507042254,
|
365 |
-
"acc_stderr": 0.016803268469738605,
|
366 |
-
"acc_norm": 0.46830985915492956,
|
367 |
-
"acc_norm_stderr": 0.01710531885082843
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Jaewoo1/Foundation_Platypus_data",
|
436 |
-
"model_sha": "63fbecee8df6cc694880299e37b7cd8f8140942e",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json
DELETED
@@ -1,444 +0,0 @@
[444 deleted lines: the full evaluation result JSON — a "results" map with acc / acc_stderr / acc_norm / acc_norm_stderr for harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and each harness|ko_mmlu_*|5 subtask, mc1 / mc2 for harness|ko_truthfulqa_mc|0 and acc / acc_norm for harness|ko_commongen_v2|2; a "versions" map (0 for ko_arc_challenge, ko_hellaswag and ko_truthfulqa_mc, 1 for all other tasks); and "config_general" with model_name "Jaewoo1/KoT-Platypus2_foundation", model_sha "7e97a65b825f9aa4691fe2bebf14696d80ba831d", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null]
Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json
DELETED
@@ -1,444 +0,0 @@
[444 deleted lines: the full evaluation result JSON — a "results" map with acc / acc_stderr / acc_norm / acc_norm_stderr for harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and each harness|ko_mmlu_*|5 subtask, mc1 / mc2 for harness|ko_truthfulqa_mc|0 and acc / acc_norm for harness|ko_commongen_v2|2; a "versions" map (0 for ko_arc_challenge, ko_hellaswag and ko_truthfulqa_mc, 1 for all other tasks); and "config_general" with model_name "Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA", model_sha "cbb72323bf2db6eb9ea591a4a882d02964d53eed", model_dtype "torch.float16", lighteval_sha "", num_few_shot_default 0, num_fewshot_seeds 1, override_batch_size 1, max_samples null]
Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json
DELETED
@@ -1,444 +0,0 @@
[deleted lines 1-182: opening of the result JSON — "results" entries with acc / acc_stderr / acc_norm / acc_norm_stderr for harness|ko_arc_challenge|25, harness|ko_hellaswag|10 and the harness|ko_mmlu_*|5 subtasks through college_medicine]
-        "harness|ko_mmlu_elementary_mathematics|5": {
-            "acc": 0.26455026455026454,
|
185 |
-
"acc_stderr": 0.022717467897708617,
|
186 |
-
"acc_norm": 0.26455026455026454,
|
187 |
-
"acc_norm_stderr": 0.022717467897708617
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3402777777777778,
|
191 |
-
"acc_stderr": 0.03962135573486219,
|
192 |
-
"acc_norm": 0.3402777777777778,
|
193 |
-
"acc_norm_stderr": 0.03962135573486219
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.27,
|
197 |
-
"acc_stderr": 0.044619604333847394,
|
198 |
-
"acc_norm": 0.27,
|
199 |
-
"acc_norm_stderr": 0.044619604333847394
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.52,
|
203 |
-
"acc_stderr": 0.050211673156867795,
|
204 |
-
"acc_norm": 0.52,
|
205 |
-
"acc_norm_stderr": 0.050211673156867795
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.4190751445086705,
|
209 |
-
"acc_stderr": 0.026564178111422622,
|
210 |
-
"acc_norm": 0.4190751445086705,
|
211 |
-
"acc_norm_stderr": 0.026564178111422622
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.4049079754601227,
|
215 |
-
"acc_stderr": 0.03856672163548914,
|
216 |
-
"acc_norm": 0.4049079754601227,
|
217 |
-
"acc_norm_stderr": 0.03856672163548914
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.41358024691358025,
|
221 |
-
"acc_stderr": 0.027402042040269955,
|
222 |
-
"acc_norm": 0.41358024691358025,
|
223 |
-
"acc_norm_stderr": 0.027402042040269955
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.28,
|
227 |
-
"acc_stderr": 0.045126085985421296,
|
228 |
-
"acc_norm": 0.28,
|
229 |
-
"acc_norm_stderr": 0.045126085985421296
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.43005181347150256,
|
233 |
-
"acc_stderr": 0.03572954333144808,
|
234 |
-
"acc_norm": 0.43005181347150256,
|
235 |
-
"acc_norm_stderr": 0.03572954333144808
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2807017543859649,
|
239 |
-
"acc_stderr": 0.042270544512321984,
|
240 |
-
"acc_norm": 0.2807017543859649,
|
241 |
-
"acc_norm_stderr": 0.042270544512321984
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.3834862385321101,
|
245 |
-
"acc_stderr": 0.020847156641915984,
|
246 |
-
"acc_norm": 0.3834862385321101,
|
247 |
-
"acc_norm_stderr": 0.020847156641915984
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.2698412698412698,
|
251 |
-
"acc_stderr": 0.03970158273235172,
|
252 |
-
"acc_norm": 0.2698412698412698,
|
253 |
-
"acc_norm_stderr": 0.03970158273235172
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.3790849673202614,
|
257 |
-
"acc_stderr": 0.027780141207023337,
|
258 |
-
"acc_norm": 0.3790849673202614,
|
259 |
-
"acc_norm_stderr": 0.027780141207023337
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.47,
|
263 |
-
"acc_stderr": 0.05016135580465919,
|
264 |
-
"acc_norm": 0.47,
|
265 |
-
"acc_norm_stderr": 0.05016135580465919
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.5785123966942148,
|
269 |
-
"acc_stderr": 0.04507732278775089,
|
270 |
-
"acc_norm": 0.5785123966942148,
|
271 |
-
"acc_norm_stderr": 0.04507732278775089
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.3223684210526316,
|
275 |
-
"acc_stderr": 0.038035102483515854,
|
276 |
-
"acc_norm": 0.3223684210526316,
|
277 |
-
"acc_norm_stderr": 0.038035102483515854
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3284313725490196,
|
281 |
-
"acc_stderr": 0.01899970738316267,
|
282 |
-
"acc_norm": 0.3284313725490196,
|
283 |
-
"acc_norm_stderr": 0.01899970738316267
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.30851063829787234,
|
287 |
-
"acc_stderr": 0.027553366165101362,
|
288 |
-
"acc_norm": 0.30851063829787234,
|
289 |
-
"acc_norm_stderr": 0.027553366165101362
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.35714285714285715,
|
293 |
-
"acc_stderr": 0.04547960999764376,
|
294 |
-
"acc_norm": 0.35714285714285715,
|
295 |
-
"acc_norm_stderr": 0.04547960999764376
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.3472222222222222,
|
299 |
-
"acc_stderr": 0.03246887243637648,
|
300 |
-
"acc_norm": 0.3472222222222222,
|
301 |
-
"acc_norm_stderr": 0.03246887243637648
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.264804469273743,
|
305 |
-
"acc_stderr": 0.014756906483260657,
|
306 |
-
"acc_norm": 0.264804469273743,
|
307 |
-
"acc_norm_stderr": 0.014756906483260657
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.36,
|
311 |
-
"acc_stderr": 0.048241815132442176,
|
312 |
-
"acc_norm": 0.36,
|
313 |
-
"acc_norm_stderr": 0.048241815132442176
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.35,
|
317 |
-
"acc_stderr": 0.0479372485441102,
|
318 |
-
"acc_norm": 0.35,
|
319 |
-
"acc_norm_stderr": 0.0479372485441102
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.33455882352941174,
|
323 |
-
"acc_stderr": 0.028661996202335314,
|
324 |
-
"acc_norm": 0.33455882352941174,
|
325 |
-
"acc_norm_stderr": 0.028661996202335314
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.43673469387755104,
|
329 |
-
"acc_stderr": 0.031751952375833226,
|
330 |
-
"acc_norm": 0.43673469387755104,
|
331 |
-
"acc_norm_stderr": 0.031751952375833226
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.4219409282700422,
|
335 |
-
"acc_stderr": 0.032148146302403695,
|
336 |
-
"acc_norm": 0.4219409282700422,
|
337 |
-
"acc_norm_stderr": 0.032148146302403695
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2920469361147327,
|
341 |
-
"acc_stderr": 0.011613349136271817,
|
342 |
-
"acc_norm": 0.2920469361147327,
|
343 |
-
"acc_norm_stderr": 0.011613349136271817
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.4068627450980392,
|
347 |
-
"acc_stderr": 0.03447891136353383,
|
348 |
-
"acc_norm": 0.4068627450980392,
|
349 |
-
"acc_norm_stderr": 0.03447891136353383
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4303030303030303,
|
353 |
-
"acc_stderr": 0.03866225962879077,
|
354 |
-
"acc_norm": 0.4303030303030303,
|
355 |
-
"acc_norm_stderr": 0.03866225962879077
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.3023255813953488,
|
359 |
-
"mc1_stderr": 0.01607750926613303,
|
360 |
-
"mc2": 0.4750714543386988,
|
361 |
-
"mc2_stderr": 0.016159472828434183
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.27582159624413144,
|
365 |
-
"acc_stderr": 0.01532047174956522,
|
366 |
-
"acc_norm": 0.29107981220657275,
|
367 |
-
"acc_norm_stderr": 0.015571840078994575
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus",
|
436 |
-
"model_sha": "1c97acb58f2a740d7994d1ea7b0c02c234bbde3a",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.197098976109215,
|
5 |
-
"acc_stderr": 0.011625047669880612,
|
6 |
-
"acc_norm": 0.26535836177474403,
|
7 |
-
"acc_norm_stderr": 0.012902554762313964
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.29904401513642703,
|
11 |
-
"acc_stderr": 0.0045690346133326004,
|
12 |
-
"acc_norm": 0.36675960963951404,
|
13 |
-
"acc_norm_stderr": 0.0048093520750089385
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.22807017543859648,
|
17 |
-
"acc_stderr": 0.03218093795602357,
|
18 |
-
"acc_norm": 0.22807017543859648,
|
19 |
-
"acc_norm_stderr": 0.03218093795602357
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.20388349514563106,
|
23 |
-
"acc_stderr": 0.0398913985953177,
|
24 |
-
"acc_norm": 0.20388349514563106,
|
25 |
-
"acc_norm_stderr": 0.0398913985953177
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.2796934865900383,
|
29 |
-
"acc_stderr": 0.016050792148036536,
|
30 |
-
"acc_norm": 0.2796934865900383,
|
31 |
-
"acc_norm_stderr": 0.016050792148036536
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.32592592592592595,
|
35 |
-
"acc_stderr": 0.040491220417025055,
|
36 |
-
"acc_norm": 0.32592592592592595,
|
37 |
-
"acc_norm_stderr": 0.040491220417025055
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.25,
|
41 |
-
"acc_stderr": 0.04351941398892446,
|
42 |
-
"acc_norm": 0.25,
|
43 |
-
"acc_norm_stderr": 0.04351941398892446
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.23404255319148937,
|
47 |
-
"acc_stderr": 0.027678452578212387,
|
48 |
-
"acc_norm": 0.23404255319148937,
|
49 |
-
"acc_norm_stderr": 0.027678452578212387
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.21084337349397592,
|
53 |
-
"acc_stderr": 0.031755547866299194,
|
54 |
-
"acc_norm": 0.21084337349397592,
|
55 |
-
"acc_norm_stderr": 0.031755547866299194
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.2765273311897106,
|
59 |
-
"acc_stderr": 0.02540383297817961,
|
60 |
-
"acc_norm": 0.2765273311897106,
|
61 |
-
"acc_norm_stderr": 0.02540383297817961
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.2825112107623318,
|
65 |
-
"acc_stderr": 0.030216831011508762,
|
66 |
-
"acc_norm": 0.2825112107623318,
|
67 |
-
"acc_norm_stderr": 0.030216831011508762
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.24427480916030533,
|
71 |
-
"acc_stderr": 0.03768335959728742,
|
72 |
-
"acc_norm": 0.24427480916030533,
|
73 |
-
"acc_norm_stderr": 0.03768335959728742
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.28,
|
77 |
-
"acc_stderr": 0.04512608598542127,
|
78 |
-
"acc_norm": 0.28,
|
79 |
-
"acc_norm_stderr": 0.04512608598542127
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.2676767676767677,
|
83 |
-
"acc_stderr": 0.03154449888270285,
|
84 |
-
"acc_norm": 0.2676767676767677,
|
85 |
-
"acc_norm_stderr": 0.03154449888270285
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.25517241379310346,
|
89 |
-
"acc_stderr": 0.03632984052707842,
|
90 |
-
"acc_norm": 0.25517241379310346,
|
91 |
-
"acc_norm_stderr": 0.03632984052707842
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.22549019607843138,
|
95 |
-
"acc_stderr": 0.04158307533083286,
|
96 |
-
"acc_norm": 0.22549019607843138,
|
97 |
-
"acc_norm_stderr": 0.04158307533083286
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.23529411764705882,
|
101 |
-
"acc_stderr": 0.027553614467863786,
|
102 |
-
"acc_norm": 0.23529411764705882,
|
103 |
-
"acc_norm_stderr": 0.027553614467863786
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.2641025641025641,
|
107 |
-
"acc_stderr": 0.022352193737453285,
|
108 |
-
"acc_norm": 0.2641025641025641,
|
109 |
-
"acc_norm_stderr": 0.022352193737453285
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.25,
|
113 |
-
"acc_stderr": 0.04351941398892446,
|
114 |
-
"acc_norm": 0.25,
|
115 |
-
"acc_norm_stderr": 0.04351941398892446
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.27,
|
119 |
-
"acc_stderr": 0.044619604333847394,
|
120 |
-
"acc_norm": 0.27,
|
121 |
-
"acc_norm_stderr": 0.044619604333847394
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.26851851851851855,
|
125 |
-
"acc_stderr": 0.04284467968052191,
|
126 |
-
"acc_norm": 0.26851851851851855,
|
127 |
-
"acc_norm_stderr": 0.04284467968052191
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.2315270935960591,
|
131 |
-
"acc_stderr": 0.029678333141444444,
|
132 |
-
"acc_norm": 0.2315270935960591,
|
133 |
-
"acc_norm_stderr": 0.029678333141444444
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.3096774193548387,
|
137 |
-
"acc_stderr": 0.026302774983517414,
|
138 |
-
"acc_norm": 0.3096774193548387,
|
139 |
-
"acc_norm_stderr": 0.026302774983517414
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.3333333333333333,
|
143 |
-
"acc_stderr": 0.030882736974138663,
|
144 |
-
"acc_norm": 0.3333333333333333,
|
145 |
-
"acc_norm_stderr": 0.030882736974138663
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.2641509433962264,
|
149 |
-
"acc_stderr": 0.0271342916287417,
|
150 |
-
"acc_norm": 0.2641509433962264,
|
151 |
-
"acc_norm_stderr": 0.0271342916287417
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.20909090909090908,
|
155 |
-
"acc_stderr": 0.038950910157241364,
|
156 |
-
"acc_norm": 0.20909090909090908,
|
157 |
-
"acc_norm_stderr": 0.038950910157241364
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.24814814814814815,
|
161 |
-
"acc_stderr": 0.0263357394040558,
|
162 |
-
"acc_norm": 0.24814814814814815,
|
163 |
-
"acc_norm_stderr": 0.0263357394040558
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2980132450331126,
|
167 |
-
"acc_stderr": 0.03734535676787198,
|
168 |
-
"acc_norm": 0.2980132450331126,
|
169 |
-
"acc_norm_stderr": 0.03734535676787198
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.29850746268656714,
|
173 |
-
"acc_stderr": 0.032357437893550424,
|
174 |
-
"acc_norm": 0.29850746268656714,
|
175 |
-
"acc_norm_stderr": 0.032357437893550424
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.21965317919075145,
|
179 |
-
"acc_stderr": 0.031568093627031744,
|
180 |
-
"acc_norm": 0.21965317919075145,
|
181 |
-
"acc_norm_stderr": 0.031568093627031744
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.24603174603174602,
|
185 |
-
"acc_stderr": 0.022182037202948368,
|
186 |
-
"acc_norm": 0.24603174603174602,
|
187 |
-
"acc_norm_stderr": 0.022182037202948368
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.2222222222222222,
|
191 |
-
"acc_stderr": 0.03476590104304134,
|
192 |
-
"acc_norm": 0.2222222222222222,
|
193 |
-
"acc_norm_stderr": 0.03476590104304134
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.24,
|
197 |
-
"acc_stderr": 0.04292346959909282,
|
198 |
-
"acc_norm": 0.24,
|
199 |
-
"acc_norm_stderr": 0.04292346959909282
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.24,
|
203 |
-
"acc_stderr": 0.042923469599092816,
|
204 |
-
"acc_norm": 0.24,
|
205 |
-
"acc_norm_stderr": 0.042923469599092816
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.23410404624277456,
|
209 |
-
"acc_stderr": 0.022797110278071128,
|
210 |
-
"acc_norm": 0.23410404624277456,
|
211 |
-
"acc_norm_stderr": 0.022797110278071128
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.37423312883435583,
|
215 |
-
"acc_stderr": 0.03802068102899616,
|
216 |
-
"acc_norm": 0.37423312883435583,
|
217 |
-
"acc_norm_stderr": 0.03802068102899616
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.2654320987654321,
|
221 |
-
"acc_stderr": 0.024569223600460845,
|
222 |
-
"acc_norm": 0.2654320987654321,
|
223 |
-
"acc_norm_stderr": 0.024569223600460845
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.23,
|
227 |
-
"acc_stderr": 0.042295258468165065,
|
228 |
-
"acc_norm": 0.23,
|
229 |
-
"acc_norm_stderr": 0.042295258468165065
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.2538860103626943,
|
233 |
-
"acc_stderr": 0.03141024780565318,
|
234 |
-
"acc_norm": 0.2538860103626943,
|
235 |
-
"acc_norm_stderr": 0.03141024780565318
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.23684210526315788,
|
239 |
-
"acc_stderr": 0.039994238792813344,
|
240 |
-
"acc_norm": 0.23684210526315788,
|
241 |
-
"acc_norm_stderr": 0.039994238792813344
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.28807339449541286,
|
245 |
-
"acc_stderr": 0.019416445892636015,
|
246 |
-
"acc_norm": 0.28807339449541286,
|
247 |
-
"acc_norm_stderr": 0.019416445892636015
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.20634920634920634,
|
251 |
-
"acc_stderr": 0.0361960452412425,
|
252 |
-
"acc_norm": 0.20634920634920634,
|
253 |
-
"acc_norm_stderr": 0.0361960452412425
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.2777777777777778,
|
257 |
-
"acc_stderr": 0.025646863097137908,
|
258 |
-
"acc_norm": 0.2777777777777778,
|
259 |
-
"acc_norm_stderr": 0.025646863097137908
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.25,
|
263 |
-
"acc_stderr": 0.04351941398892446,
|
264 |
-
"acc_norm": 0.25,
|
265 |
-
"acc_norm_stderr": 0.04351941398892446
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.38016528925619836,
|
269 |
-
"acc_stderr": 0.04431324501968431,
|
270 |
-
"acc_norm": 0.38016528925619836,
|
271 |
-
"acc_norm_stderr": 0.04431324501968431
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.2236842105263158,
|
275 |
-
"acc_stderr": 0.03391160934343602,
|
276 |
-
"acc_norm": 0.2236842105263158,
|
277 |
-
"acc_norm_stderr": 0.03391160934343602
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.2647058823529412,
|
281 |
-
"acc_stderr": 0.01784808957491323,
|
282 |
-
"acc_norm": 0.2647058823529412,
|
283 |
-
"acc_norm_stderr": 0.01784808957491323
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.25886524822695034,
|
287 |
-
"acc_stderr": 0.026129572527180848,
|
288 |
-
"acc_norm": 0.25886524822695034,
|
289 |
-
"acc_norm_stderr": 0.026129572527180848
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.30357142857142855,
|
293 |
-
"acc_stderr": 0.04364226155841044,
|
294 |
-
"acc_norm": 0.30357142857142855,
|
295 |
-
"acc_norm_stderr": 0.04364226155841044
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.26851851851851855,
|
299 |
-
"acc_stderr": 0.030225226160012386,
|
300 |
-
"acc_norm": 0.26851851851851855,
|
301 |
-
"acc_norm_stderr": 0.030225226160012386
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24581005586592178,
|
305 |
-
"acc_stderr": 0.014400296429225605,
|
306 |
-
"acc_norm": 0.24581005586592178,
|
307 |
-
"acc_norm_stderr": 0.014400296429225605
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.18,
|
311 |
-
"acc_stderr": 0.038612291966536955,
|
312 |
-
"acc_norm": 0.18,
|
313 |
-
"acc_norm_stderr": 0.038612291966536955
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.28,
|
317 |
-
"acc_stderr": 0.04512608598542128,
|
318 |
-
"acc_norm": 0.28,
|
319 |
-
"acc_norm_stderr": 0.04512608598542128
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.35661764705882354,
|
323 |
-
"acc_stderr": 0.029097209568411962,
|
324 |
-
"acc_norm": 0.35661764705882354,
|
325 |
-
"acc_norm_stderr": 0.029097209568411962
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.24489795918367346,
|
329 |
-
"acc_stderr": 0.027529637440174913,
|
330 |
-
"acc_norm": 0.24489795918367346,
|
331 |
-
"acc_norm_stderr": 0.027529637440174913
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.3291139240506329,
|
335 |
-
"acc_stderr": 0.03058732629470236,
|
336 |
-
"acc_norm": 0.3291139240506329,
|
337 |
-
"acc_norm_stderr": 0.03058732629470236
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.2522816166883963,
|
341 |
-
"acc_stderr": 0.011092789056875248,
|
342 |
-
"acc_norm": 0.2522816166883963,
|
343 |
-
"acc_norm_stderr": 0.011092789056875248
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.27450980392156865,
|
347 |
-
"acc_stderr": 0.031321798030832904,
|
348 |
-
"acc_norm": 0.27450980392156865,
|
349 |
-
"acc_norm_stderr": 0.031321798030832904
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.4,
|
353 |
-
"acc_stderr": 0.03825460278380026,
|
354 |
-
"acc_norm": 0.4,
|
355 |
-
"acc_norm_stderr": 0.03825460278380026
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.2692778457772338,
|
359 |
-
"mc1_stderr": 0.015528566637087288,
|
360 |
-
"mc2": 0.4755864114164748,
|
361 |
-
"mc2_stderr": 0.016657423214439165
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.12910798122065728,
|
365 |
-
"acc_stderr": 0.011494601522741298,
|
366 |
-
"acc_norm": 0.29460093896713613,
|
367 |
-
"acc_norm_stderr": 0.015626788056631535
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Jaewoo1/Platypus7B_Follow_FT",
|
436 |
-
"model_sha": "ac5c77ab817d2d9b0a4f3fc7c609dce3770428d8",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json
DELETED
@@ -1,444 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"results": {
|
3 |
-
"harness|ko_arc_challenge|25": {
|
4 |
-
"acc": 0.27986348122866894,
|
5 |
-
"acc_stderr": 0.013119040897725923,
|
6 |
-
"acc_norm": 0.3506825938566553,
|
7 |
-
"acc_norm_stderr": 0.013944635930726089
|
8 |
-
},
|
9 |
-
"harness|ko_hellaswag|10": {
|
10 |
-
"acc": 0.3648675562636925,
|
11 |
-
"acc_stderr": 0.004804091708812553,
|
12 |
-
"acc_norm": 0.4856602270464051,
|
13 |
-
"acc_norm_stderr": 0.004987728900897601
|
14 |
-
},
|
15 |
-
"harness|ko_mmlu_world_religions|5": {
|
16 |
-
"acc": 0.40350877192982454,
|
17 |
-
"acc_stderr": 0.03762738699917055,
|
18 |
-
"acc_norm": 0.40350877192982454,
|
19 |
-
"acc_norm_stderr": 0.03762738699917055
|
20 |
-
},
|
21 |
-
"harness|ko_mmlu_management|5": {
|
22 |
-
"acc": 0.32038834951456313,
|
23 |
-
"acc_stderr": 0.0462028408228004,
|
24 |
-
"acc_norm": 0.32038834951456313,
|
25 |
-
"acc_norm_stderr": 0.0462028408228004
|
26 |
-
},
|
27 |
-
"harness|ko_mmlu_miscellaneous|5": {
|
28 |
-
"acc": 0.37037037037037035,
|
29 |
-
"acc_stderr": 0.017268607560005776,
|
30 |
-
"acc_norm": 0.37037037037037035,
|
31 |
-
"acc_norm_stderr": 0.017268607560005776
|
32 |
-
},
|
33 |
-
"harness|ko_mmlu_anatomy|5": {
|
34 |
-
"acc": 0.35555555555555557,
|
35 |
-
"acc_stderr": 0.04135176749720386,
|
36 |
-
"acc_norm": 0.35555555555555557,
|
37 |
-
"acc_norm_stderr": 0.04135176749720386
|
38 |
-
},
|
39 |
-
"harness|ko_mmlu_abstract_algebra|5": {
|
40 |
-
"acc": 0.3,
|
41 |
-
"acc_stderr": 0.046056618647183814,
|
42 |
-
"acc_norm": 0.3,
|
43 |
-
"acc_norm_stderr": 0.046056618647183814
|
44 |
-
},
|
45 |
-
"harness|ko_mmlu_conceptual_physics|5": {
|
46 |
-
"acc": 0.2765957446808511,
|
47 |
-
"acc_stderr": 0.029241883869628817,
|
48 |
-
"acc_norm": 0.2765957446808511,
|
49 |
-
"acc_norm_stderr": 0.029241883869628817
|
50 |
-
},
|
51 |
-
"harness|ko_mmlu_virology|5": {
|
52 |
-
"acc": 0.30120481927710846,
|
53 |
-
"acc_stderr": 0.03571609230053481,
|
54 |
-
"acc_norm": 0.30120481927710846,
|
55 |
-
"acc_norm_stderr": 0.03571609230053481
|
56 |
-
},
|
57 |
-
"harness|ko_mmlu_philosophy|5": {
|
58 |
-
"acc": 0.3987138263665595,
|
59 |
-
"acc_stderr": 0.0278093225857745,
|
60 |
-
"acc_norm": 0.3987138263665595,
|
61 |
-
"acc_norm_stderr": 0.0278093225857745
|
62 |
-
},
|
63 |
-
"harness|ko_mmlu_human_aging|5": {
|
64 |
-
"acc": 0.3811659192825112,
|
65 |
-
"acc_stderr": 0.03259625118416828,
|
66 |
-
"acc_norm": 0.3811659192825112,
|
67 |
-
"acc_norm_stderr": 0.03259625118416828
|
68 |
-
},
|
69 |
-
"harness|ko_mmlu_human_sexuality|5": {
|
70 |
-
"acc": 0.46564885496183206,
|
71 |
-
"acc_stderr": 0.043749285605997376,
|
72 |
-
"acc_norm": 0.46564885496183206,
|
73 |
-
"acc_norm_stderr": 0.043749285605997376
|
74 |
-
},
|
75 |
-
"harness|ko_mmlu_medical_genetics|5": {
|
76 |
-
"acc": 0.34,
|
77 |
-
"acc_stderr": 0.04760952285695235,
|
78 |
-
"acc_norm": 0.34,
|
79 |
-
"acc_norm_stderr": 0.04760952285695235
|
80 |
-
},
|
81 |
-
"harness|ko_mmlu_high_school_geography|5": {
|
82 |
-
"acc": 0.3838383838383838,
|
83 |
-
"acc_stderr": 0.03464881675016339,
|
84 |
-
"acc_norm": 0.3838383838383838,
|
85 |
-
"acc_norm_stderr": 0.03464881675016339
|
86 |
-
},
|
87 |
-
"harness|ko_mmlu_electrical_engineering|5": {
|
88 |
-
"acc": 0.4,
|
89 |
-
"acc_stderr": 0.04082482904638628,
|
90 |
-
"acc_norm": 0.4,
|
91 |
-
"acc_norm_stderr": 0.04082482904638628
|
92 |
-
},
|
93 |
-
"harness|ko_mmlu_college_physics|5": {
|
94 |
-
"acc": 0.23529411764705882,
|
95 |
-
"acc_stderr": 0.04220773659171453,
|
96 |
-
"acc_norm": 0.23529411764705882,
|
97 |
-
"acc_norm_stderr": 0.04220773659171453
|
98 |
-
},
|
99 |
-
"harness|ko_mmlu_high_school_microeconomics|5": {
|
100 |
-
"acc": 0.38235294117647056,
|
101 |
-
"acc_stderr": 0.03156663099215416,
|
102 |
-
"acc_norm": 0.38235294117647056,
|
103 |
-
"acc_norm_stderr": 0.03156663099215416
|
104 |
-
},
|
105 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": {
|
106 |
-
"acc": 0.28717948717948716,
|
107 |
-
"acc_stderr": 0.022939925418530613,
|
108 |
-
"acc_norm": 0.28717948717948716,
|
109 |
-
"acc_norm_stderr": 0.022939925418530613
|
110 |
-
},
|
111 |
-
"harness|ko_mmlu_computer_security|5": {
|
112 |
-
"acc": 0.41,
|
113 |
-
"acc_stderr": 0.04943110704237102,
|
114 |
-
"acc_norm": 0.41,
|
115 |
-
"acc_norm_stderr": 0.04943110704237102
|
116 |
-
},
|
117 |
-
"harness|ko_mmlu_global_facts|5": {
|
118 |
-
"acc": 0.22,
|
119 |
-
"acc_stderr": 0.041633319989322695,
|
120 |
-
"acc_norm": 0.22,
|
121 |
-
"acc_norm_stderr": 0.041633319989322695
|
122 |
-
},
|
123 |
-
"harness|ko_mmlu_jurisprudence|5": {
|
124 |
-
"acc": 0.4166666666666667,
|
125 |
-
"acc_stderr": 0.04766075165356461,
|
126 |
-
"acc_norm": 0.4166666666666667,
|
127 |
-
"acc_norm_stderr": 0.04766075165356461
|
128 |
-
},
|
129 |
-
"harness|ko_mmlu_high_school_chemistry|5": {
|
130 |
-
"acc": 0.23645320197044334,
|
131 |
-
"acc_stderr": 0.029896114291733555,
|
132 |
-
"acc_norm": 0.23645320197044334,
|
133 |
-
"acc_norm_stderr": 0.029896114291733555
|
134 |
-
},
|
135 |
-
"harness|ko_mmlu_high_school_biology|5": {
|
136 |
-
"acc": 0.38387096774193546,
|
137 |
-
"acc_stderr": 0.027666182075539652,
|
138 |
-
"acc_norm": 0.38387096774193546,
|
139 |
-
"acc_norm_stderr": 0.027666182075539652
|
140 |
-
},
|
141 |
-
"harness|ko_mmlu_marketing|5": {
|
142 |
-
"acc": 0.49572649572649574,
|
143 |
-
"acc_stderr": 0.032754892643821316,
|
144 |
-
"acc_norm": 0.49572649572649574,
|
145 |
-
"acc_norm_stderr": 0.032754892643821316
|
146 |
-
},
|
147 |
-
"harness|ko_mmlu_clinical_knowledge|5": {
|
148 |
-
"acc": 0.3320754716981132,
|
149 |
-
"acc_stderr": 0.02898545565233439,
|
150 |
-
"acc_norm": 0.3320754716981132,
|
151 |
-
"acc_norm_stderr": 0.02898545565233439
|
152 |
-
},
|
153 |
-
"harness|ko_mmlu_public_relations|5": {
|
154 |
-
"acc": 0.39090909090909093,
|
155 |
-
"acc_stderr": 0.04673752333670237,
|
156 |
-
"acc_norm": 0.39090909090909093,
|
157 |
-
"acc_norm_stderr": 0.04673752333670237
|
158 |
-
},
|
159 |
-
"harness|ko_mmlu_high_school_mathematics|5": {
|
160 |
-
"acc": 0.22592592592592592,
|
161 |
-
"acc_stderr": 0.02549753263960955,
|
162 |
-
"acc_norm": 0.22592592592592592,
|
163 |
-
"acc_norm_stderr": 0.02549753263960955
|
164 |
-
},
|
165 |
-
"harness|ko_mmlu_high_school_physics|5": {
|
166 |
-
"acc": 0.2980132450331126,
|
167 |
-
"acc_stderr": 0.037345356767871984,
|
168 |
-
"acc_norm": 0.2980132450331126,
|
169 |
-
"acc_norm_stderr": 0.037345356767871984
|
170 |
-
},
|
171 |
-
"harness|ko_mmlu_sociology|5": {
|
172 |
-
"acc": 0.4527363184079602,
|
173 |
-
"acc_stderr": 0.035197027175769155,
|
174 |
-
"acc_norm": 0.4527363184079602,
|
175 |
-
"acc_norm_stderr": 0.035197027175769155
|
176 |
-
},
|
177 |
-
"harness|ko_mmlu_college_medicine|5": {
|
178 |
-
"acc": 0.2947976878612717,
|
179 |
-
"acc_stderr": 0.03476599607516478,
|
180 |
-
"acc_norm": 0.2947976878612717,
|
181 |
-
"acc_norm_stderr": 0.03476599607516478
|
182 |
-
},
|
183 |
-
"harness|ko_mmlu_elementary_mathematics|5": {
|
184 |
-
"acc": 0.24338624338624337,
|
185 |
-
"acc_stderr": 0.022101128787415412,
|
186 |
-
"acc_norm": 0.24338624338624337,
|
187 |
-
"acc_norm_stderr": 0.022101128787415412
|
188 |
-
},
|
189 |
-
"harness|ko_mmlu_college_biology|5": {
|
190 |
-
"acc": 0.3402777777777778,
|
191 |
-
"acc_stderr": 0.039621355734862175,
|
192 |
-
"acc_norm": 0.3402777777777778,
|
193 |
-
"acc_norm_stderr": 0.039621355734862175
|
194 |
-
},
|
195 |
-
"harness|ko_mmlu_college_chemistry|5": {
|
196 |
-
"acc": 0.22,
|
197 |
-
"acc_stderr": 0.04163331998932268,
|
198 |
-
"acc_norm": 0.22,
|
199 |
-
"acc_norm_stderr": 0.04163331998932268
|
200 |
-
},
|
201 |
-
"harness|ko_mmlu_us_foreign_policy|5": {
|
202 |
-
"acc": 0.41,
|
203 |
-
"acc_stderr": 0.04943110704237103,
|
204 |
-
"acc_norm": 0.41,
|
205 |
-
"acc_norm_stderr": 0.04943110704237103
|
206 |
-
},
|
207 |
-
"harness|ko_mmlu_moral_disputes|5": {
|
208 |
-
"acc": 0.3699421965317919,
|
209 |
-
"acc_stderr": 0.025992472029306386,
|
210 |
-
"acc_norm": 0.3699421965317919,
|
211 |
-
"acc_norm_stderr": 0.025992472029306386
|
212 |
-
},
|
213 |
-
"harness|ko_mmlu_logical_fallacies|5": {
|
214 |
-
"acc": 0.3374233128834356,
|
215 |
-
"acc_stderr": 0.03714908409935575,
|
216 |
-
"acc_norm": 0.3374233128834356,
|
217 |
-
"acc_norm_stderr": 0.03714908409935575
|
218 |
-
},
|
219 |
-
"harness|ko_mmlu_prehistory|5": {
|
220 |
-
"acc": 0.38580246913580246,
|
221 |
-
"acc_stderr": 0.027085401226132143,
|
222 |
-
"acc_norm": 0.38580246913580246,
|
223 |
-
"acc_norm_stderr": 0.027085401226132143
|
224 |
-
},
|
225 |
-
"harness|ko_mmlu_college_mathematics|5": {
|
226 |
-
"acc": 0.33,
|
227 |
-
"acc_stderr": 0.04725815626252605,
|
228 |
-
"acc_norm": 0.33,
|
229 |
-
"acc_norm_stderr": 0.04725815626252605
|
230 |
-
},
|
231 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": {
|
232 |
-
"acc": 0.38860103626943004,
|
233 |
-
"acc_stderr": 0.03517739796373132,
|
234 |
-
"acc_norm": 0.38860103626943004,
|
235 |
-
"acc_norm_stderr": 0.03517739796373132
|
236 |
-
},
|
237 |
-
"harness|ko_mmlu_econometrics|5": {
|
238 |
-
"acc": 0.2719298245614035,
|
239 |
-
"acc_stderr": 0.04185774424022057,
|
240 |
-
"acc_norm": 0.2719298245614035,
|
241 |
-
"acc_norm_stderr": 0.04185774424022057
|
242 |
-
},
|
243 |
-
"harness|ko_mmlu_high_school_psychology|5": {
|
244 |
-
"acc": 0.4018348623853211,
|
245 |
-
"acc_stderr": 0.021020106172997013,
|
246 |
-
"acc_norm": 0.4018348623853211,
|
247 |
-
"acc_norm_stderr": 0.021020106172997013
|
248 |
-
},
|
249 |
-
"harness|ko_mmlu_formal_logic|5": {
|
250 |
-
"acc": 0.2619047619047619,
|
251 |
-
"acc_stderr": 0.039325376803928704,
|
252 |
-
"acc_norm": 0.2619047619047619,
|
253 |
-
"acc_norm_stderr": 0.039325376803928704
|
254 |
-
},
|
255 |
-
"harness|ko_mmlu_nutrition|5": {
|
256 |
-
"acc": 0.43137254901960786,
|
257 |
-
"acc_stderr": 0.028358956313423556,
|
258 |
-
"acc_norm": 0.43137254901960786,
|
259 |
-
"acc_norm_stderr": 0.028358956313423556
|
260 |
-
},
|
261 |
-
"harness|ko_mmlu_business_ethics|5": {
|
262 |
-
"acc": 0.35,
|
263 |
-
"acc_stderr": 0.0479372485441102,
|
264 |
-
"acc_norm": 0.35,
|
265 |
-
"acc_norm_stderr": 0.0479372485441102
|
266 |
-
},
|
267 |
-
"harness|ko_mmlu_international_law|5": {
|
268 |
-
"acc": 0.49586776859504134,
|
269 |
-
"acc_stderr": 0.04564198767432754,
|
270 |
-
"acc_norm": 0.49586776859504134,
|
271 |
-
"acc_norm_stderr": 0.04564198767432754
|
272 |
-
},
|
273 |
-
"harness|ko_mmlu_astronomy|5": {
|
274 |
-
"acc": 0.27631578947368424,
|
275 |
-
"acc_stderr": 0.03639057569952925,
|
276 |
-
"acc_norm": 0.27631578947368424,
|
277 |
-
"acc_norm_stderr": 0.03639057569952925
|
278 |
-
},
|
279 |
-
"harness|ko_mmlu_professional_psychology|5": {
|
280 |
-
"acc": 0.3202614379084967,
|
281 |
-
"acc_stderr": 0.01887568293806944,
|
282 |
-
"acc_norm": 0.3202614379084967,
|
283 |
-
"acc_norm_stderr": 0.01887568293806944
|
284 |
-
},
|
285 |
-
"harness|ko_mmlu_professional_accounting|5": {
|
286 |
-
"acc": 0.30141843971631205,
|
287 |
-
"acc_stderr": 0.02737412888263115,
|
288 |
-
"acc_norm": 0.30141843971631205,
|
289 |
-
"acc_norm_stderr": 0.02737412888263115
|
290 |
-
},
|
291 |
-
"harness|ko_mmlu_machine_learning|5": {
|
292 |
-
"acc": 0.26785714285714285,
|
293 |
-
"acc_stderr": 0.04203277291467762,
|
294 |
-
"acc_norm": 0.26785714285714285,
|
295 |
-
"acc_norm_stderr": 0.04203277291467762
|
296 |
-
},
|
297 |
-
"harness|ko_mmlu_high_school_statistics|5": {
|
298 |
-
"acc": 0.32407407407407407,
|
299 |
-
"acc_stderr": 0.03191923445686186,
|
300 |
-
"acc_norm": 0.32407407407407407,
|
301 |
-
"acc_norm_stderr": 0.03191923445686186
|
302 |
-
},
|
303 |
-
"harness|ko_mmlu_moral_scenarios|5": {
|
304 |
-
"acc": 0.24692737430167597,
|
305 |
-
"acc_stderr": 0.014422292204808852,
|
306 |
-
"acc_norm": 0.24692737430167597,
|
307 |
-
"acc_norm_stderr": 0.014422292204808852
|
308 |
-
},
|
309 |
-
"harness|ko_mmlu_college_computer_science|5": {
|
310 |
-
"acc": 0.27,
|
311 |
-
"acc_stderr": 0.044619604333847394,
|
312 |
-
"acc_norm": 0.27,
|
313 |
-
"acc_norm_stderr": 0.044619604333847394
|
314 |
-
},
|
315 |
-
"harness|ko_mmlu_high_school_computer_science|5": {
|
316 |
-
"acc": 0.29,
|
317 |
-
"acc_stderr": 0.04560480215720684,
|
318 |
-
"acc_norm": 0.29,
|
319 |
-
"acc_norm_stderr": 0.04560480215720684
|
320 |
-
},
|
321 |
-
"harness|ko_mmlu_professional_medicine|5": {
|
322 |
-
"acc": 0.41911764705882354,
|
323 |
-
"acc_stderr": 0.02997280717046463,
|
324 |
-
"acc_norm": 0.41911764705882354,
|
325 |
-
"acc_norm_stderr": 0.02997280717046463
|
326 |
-
},
|
327 |
-
"harness|ko_mmlu_security_studies|5": {
|
328 |
-
"acc": 0.33877551020408164,
|
329 |
-
"acc_stderr": 0.030299506562154185,
|
330 |
-
"acc_norm": 0.33877551020408164,
|
331 |
-
"acc_norm_stderr": 0.030299506562154185
|
332 |
-
},
|
333 |
-
"harness|ko_mmlu_high_school_world_history|5": {
|
334 |
-
"acc": 0.4810126582278481,
|
335 |
-
"acc_stderr": 0.03252375148090448,
|
336 |
-
"acc_norm": 0.4810126582278481,
|
337 |
-
"acc_norm_stderr": 0.03252375148090448
|
338 |
-
},
|
339 |
-
"harness|ko_mmlu_professional_law|5": {
|
340 |
-
"acc": 0.3135593220338983,
|
341 |
-
"acc_stderr": 0.011849234291459324,
|
342 |
-
"acc_norm": 0.3135593220338983,
|
343 |
-
"acc_norm_stderr": 0.011849234291459324
|
344 |
-
},
|
345 |
-
"harness|ko_mmlu_high_school_us_history|5": {
|
346 |
-
"acc": 0.39215686274509803,
|
347 |
-
"acc_stderr": 0.03426712349247272,
|
348 |
-
"acc_norm": 0.39215686274509803,
|
349 |
-
"acc_norm_stderr": 0.03426712349247272
|
350 |
-
},
|
351 |
-
"harness|ko_mmlu_high_school_european_history|5": {
|
352 |
-
"acc": 0.3696969696969697,
|
353 |
-
"acc_stderr": 0.03769430314512568,
|
354 |
-
"acc_norm": 0.3696969696969697,
|
355 |
-
"acc_norm_stderr": 0.03769430314512568
|
356 |
-
},
|
357 |
-
"harness|ko_truthfulqa_mc|0": {
|
358 |
-
"mc1": 0.24112607099143207,
|
359 |
-
"mc1_stderr": 0.014974827279752332,
|
360 |
-
"mc2": 0.3857319099407924,
|
361 |
-
"mc2_stderr": 0.015181937276962347
|
362 |
-
},
|
363 |
-
"harness|ko_commongen_v2|2": {
|
364 |
-
"acc": 0.2992957746478873,
|
365 |
-
"acc_stderr": 0.015698309276204952,
|
366 |
-
"acc_norm": 0.3779342723004695,
|
367 |
-
"acc_norm_stderr": 0.01662116634084928
|
368 |
-
}
|
369 |
-
},
|
370 |
-
"versions": {
|
371 |
-
"all": 0,
|
372 |
-
"harness|ko_arc_challenge|25": 0,
|
373 |
-
"harness|ko_hellaswag|10": 0,
|
374 |
-
"harness|ko_mmlu_world_religions|5": 1,
|
375 |
-
"harness|ko_mmlu_management|5": 1,
|
376 |
-
"harness|ko_mmlu_miscellaneous|5": 1,
|
377 |
-
"harness|ko_mmlu_anatomy|5": 1,
|
378 |
-
"harness|ko_mmlu_abstract_algebra|5": 1,
|
379 |
-
"harness|ko_mmlu_conceptual_physics|5": 1,
|
380 |
-
"harness|ko_mmlu_virology|5": 1,
|
381 |
-
"harness|ko_mmlu_philosophy|5": 1,
|
382 |
-
"harness|ko_mmlu_human_aging|5": 1,
|
383 |
-
"harness|ko_mmlu_human_sexuality|5": 1,
|
384 |
-
"harness|ko_mmlu_medical_genetics|5": 1,
|
385 |
-
"harness|ko_mmlu_high_school_geography|5": 1,
|
386 |
-
"harness|ko_mmlu_electrical_engineering|5": 1,
|
387 |
-
"harness|ko_mmlu_college_physics|5": 1,
|
388 |
-
"harness|ko_mmlu_high_school_microeconomics|5": 1,
|
389 |
-
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
|
390 |
-
"harness|ko_mmlu_computer_security|5": 1,
|
391 |
-
"harness|ko_mmlu_global_facts|5": 1,
|
392 |
-
"harness|ko_mmlu_jurisprudence|5": 1,
|
393 |
-
"harness|ko_mmlu_high_school_chemistry|5": 1,
|
394 |
-
"harness|ko_mmlu_high_school_biology|5": 1,
|
395 |
-
"harness|ko_mmlu_marketing|5": 1,
|
396 |
-
"harness|ko_mmlu_clinical_knowledge|5": 1,
|
397 |
-
"harness|ko_mmlu_public_relations|5": 1,
|
398 |
-
"harness|ko_mmlu_high_school_mathematics|5": 1,
|
399 |
-
"harness|ko_mmlu_high_school_physics|5": 1,
|
400 |
-
"harness|ko_mmlu_sociology|5": 1,
|
401 |
-
"harness|ko_mmlu_college_medicine|5": 1,
|
402 |
-
"harness|ko_mmlu_elementary_mathematics|5": 1,
|
403 |
-
"harness|ko_mmlu_college_biology|5": 1,
|
404 |
-
"harness|ko_mmlu_college_chemistry|5": 1,
|
405 |
-
"harness|ko_mmlu_us_foreign_policy|5": 1,
|
406 |
-
"harness|ko_mmlu_moral_disputes|5": 1,
|
407 |
-
"harness|ko_mmlu_logical_fallacies|5": 1,
|
408 |
-
"harness|ko_mmlu_prehistory|5": 1,
|
409 |
-
"harness|ko_mmlu_college_mathematics|5": 1,
|
410 |
-
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
|
411 |
-
"harness|ko_mmlu_econometrics|5": 1,
|
412 |
-
"harness|ko_mmlu_high_school_psychology|5": 1,
|
413 |
-
"harness|ko_mmlu_formal_logic|5": 1,
|
414 |
-
"harness|ko_mmlu_nutrition|5": 1,
|
415 |
-
"harness|ko_mmlu_business_ethics|5": 1,
|
416 |
-
"harness|ko_mmlu_international_law|5": 1,
|
417 |
-
"harness|ko_mmlu_astronomy|5": 1,
|
418 |
-
"harness|ko_mmlu_professional_psychology|5": 1,
|
419 |
-
"harness|ko_mmlu_professional_accounting|5": 1,
|
420 |
-
"harness|ko_mmlu_machine_learning|5": 1,
|
421 |
-
"harness|ko_mmlu_high_school_statistics|5": 1,
|
422 |
-
"harness|ko_mmlu_moral_scenarios|5": 1,
|
423 |
-
"harness|ko_mmlu_college_computer_science|5": 1,
|
424 |
-
"harness|ko_mmlu_high_school_computer_science|5": 1,
|
425 |
-
"harness|ko_mmlu_professional_medicine|5": 1,
|
426 |
-
"harness|ko_mmlu_security_studies|5": 1,
|
427 |
-
"harness|ko_mmlu_high_school_world_history|5": 1,
|
428 |
-
"harness|ko_mmlu_professional_law|5": 1,
|
429 |
-
"harness|ko_mmlu_high_school_us_history|5": 1,
|
430 |
-
"harness|ko_mmlu_high_school_european_history|5": 1,
|
431 |
-
"harness|ko_truthfulqa_mc|0": 0,
|
432 |
-
"harness|ko_commongen_v2|2": 1
|
433 |
-
},
|
434 |
-
"config_general": {
|
435 |
-
"model_name": "Jaewoo1/Platypus7B_Follow_LoRA",
|
436 |
-
"model_sha": "b963d09e5db0e791858e56e3fafac7e066328014",
|
437 |
-
"model_dtype": "torch.float16",
|
438 |
-
"lighteval_sha": "",
|
439 |
-
"num_few_shot_default": 0,
|
440 |
-
"num_fewshot_seeds": 1,
|
441 |
-
"override_batch_size": 1,
|
442 |
-
"max_samples": null
|
443 |
-
}
|
444 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.30802047781569963, "acc_stderr": 0.01349142951729204, "acc_norm": 0.3515358361774744, "acc_norm_stderr": 0.013952413699600938},
-    "harness|ko_hellaswag|10": {"acc": 0.39533957379008167, "acc_stderr": 0.004879242848473461, "acc_norm": 0.5114519020115514, "acc_norm_stderr": 0.0049884724594180165},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122},
-    "harness|ko_mmlu_management|5": {"acc": 0.20388349514563106, "acc_stderr": 0.0398913985953177, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.0398913985953177},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.2567049808429119, "acc_stderr": 0.015620480263064533, "acc_norm": 0.2567049808429119, "acc_norm_stderr": 0.015620480263064533},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.22962962962962963, "acc_stderr": 0.03633384414073465, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073465},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.2680851063829787, "acc_stderr": 0.02895734278834235, "acc_norm": 0.2680851063829787, "acc_norm_stderr": 0.02895734278834235},
-    "harness|ko_mmlu_virology|5": {"acc": 0.2710843373493976, "acc_stderr": 0.034605799075530276, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.034605799075530276},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.24437299035369775, "acc_stderr": 0.0244061620946689, "acc_norm": 0.24437299035369775, "acc_norm_stderr": 0.0244061620946689},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.22869955156950672, "acc_stderr": 0.028188240046929196, "acc_norm": 0.22869955156950672, "acc_norm_stderr": 0.028188240046929196},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.18686868686868688, "acc_stderr": 0.02777253333421899, "acc_norm": 0.18686868686868688, "acc_norm_stderr": 0.02777253333421899},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.2620689655172414, "acc_stderr": 0.03664666337225256, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.03664666337225256},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.14705882352941177, "acc_stderr": 0.035240689515674495, "acc_norm": 0.14705882352941177, "acc_norm_stderr": 0.035240689515674495},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.20588235294117646, "acc_stderr": 0.026265024608275882, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.026265024608275882},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.2076923076923077, "acc_stderr": 0.020567539567246787, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.020567539567246787},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.23148148148148148, "acc_stderr": 0.04077494709252627, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252627},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.1724137931034483, "acc_stderr": 0.026577672183036572, "acc_norm": 0.1724137931034483, "acc_norm_stderr": 0.026577672183036572},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.20967741935483872, "acc_stderr": 0.02315787934908353, "acc_norm": 0.20967741935483872, "acc_norm_stderr": 0.02315787934908353},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.2863247863247863, "acc_stderr": 0.02961432369045665, "acc_norm": 0.2863247863247863, "acc_norm_stderr": 0.02961432369045665},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827845},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.22727272727272727, "acc_stderr": 0.04013964554072776, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072776},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.22962962962962963, "acc_stderr": 0.025644108639267645, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.025644108639267645},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2251655629139073, "acc_stderr": 0.03410435282008936, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.23383084577114427, "acc_stderr": 0.029929415408348377, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348377},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.21693121693121692, "acc_stderr": 0.02122708244944504, "acc_norm": 0.21693121693121692, "acc_norm_stderr": 0.02122708244944504},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.2658959537572254, "acc_stderr": 0.02378620325550829, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.02378620325550829},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.25308641975308643, "acc_stderr": 0.024191808600712992, "acc_norm": 0.25308641975308643, "acc_norm_stderr": 0.024191808600712992},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.21243523316062177, "acc_stderr": 0.029519282616817247, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.029519282616817247},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.21100917431192662, "acc_stderr": 0.017493922404112648, "acc_norm": 0.21100917431192662, "acc_norm_stderr": 0.017493922404112648},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.2222222222222222, "acc_stderr": 0.037184890068181146, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.037184890068181146},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.2549019607843137, "acc_stderr": 0.024954184324879905, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.024954184324879905},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.24793388429752067, "acc_stderr": 0.03941897526516304, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.03941897526516304},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.27124183006535946, "acc_stderr": 0.017986615304030312, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.017986615304030312},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.23404255319148937, "acc_stderr": 0.025257861359432407, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432407},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.16666666666666666, "acc_stderr": 0.025416428388767474, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.025416428388767474},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.20220588235294118, "acc_stderr": 0.024398192986654924, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.024398192986654924},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.22040816326530613, "acc_stderr": 0.0265370453121453, "acc_norm": 0.22040816326530613, "acc_norm_stderr": 0.0265370453121453},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.24641460234680573, "acc_stderr": 0.011005971399927234, "acc_norm": 0.24641460234680573, "acc_norm_stderr": 0.011005971399927234},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.22549019607843138, "acc_stderr": 0.029331162294251735, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.029331162294251735},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.19393939393939394, "acc_stderr": 0.030874145136562097, "acc_norm": 0.19393939393939394, "acc_norm_stderr": 0.030874145136562097},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2484700122399021, "mc1_stderr": 0.0151274270965207, "mc2": 0.40538205465914606, "mc2_stderr": 0.01537488137847706},
-    "harness|ko_commongen_v2|2": {"acc": 0.5551643192488263, "acc_stderr": 0.01703514366596627, "acc_norm": 0.613849765258216, "acc_norm_stderr": 0.016689541992754253}
-  },
-  "versions": {"all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_<subtask>|5": 1 for each of the 57 MMLU subtasks above, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1},
-  "config_general": {
-    "model_name": "KRAFTON/KORani-v1-13B",
-    "model_sha": "a699d0cebc4815f33854bc83065a03fc9008473c",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.2960750853242321, "acc_stderr": 0.013340916085246263, "acc_norm": 0.3370307167235495, "acc_norm_stderr": 0.013813476652902265},
-    "harness|ko_hellaswag|10": {"acc": 0.35241983668591914, "acc_stderr": 0.004767475366689779, "acc_norm": 0.42252539334793865, "acc_norm_stderr": 0.004929517011508216},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.4093567251461988, "acc_stderr": 0.037712831076265434, "acc_norm": 0.4093567251461988, "acc_norm_stderr": 0.037712831076265434},
-    "harness|ko_mmlu_management|5": {"acc": 0.32038834951456313, "acc_stderr": 0.0462028408228004, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.0462028408228004},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.37547892720306514, "acc_stderr": 0.01731661319718279, "acc_norm": 0.37547892720306514, "acc_norm_stderr": 0.01731661319718279},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.3446808510638298, "acc_stderr": 0.031068985963122145, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.031068985963122145},
-    "harness|ko_mmlu_virology|5": {"acc": 0.3192771084337349, "acc_stderr": 0.0362933532994786, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.0362933532994786},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.4115755627009646, "acc_stderr": 0.027950481494401266, "acc_norm": 0.4115755627009646, "acc_norm_stderr": 0.027950481494401266},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.37668161434977576, "acc_stderr": 0.03252113489929188, "acc_norm": 0.37668161434977576, "acc_norm_stderr": 0.03252113489929188},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.3893129770992366, "acc_stderr": 0.04276486542814591, "acc_norm": 0.3893129770992366, "acc_norm_stderr": 0.04276486542814591},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.3333333333333333, "acc_stderr": 0.03358618145732524, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732524},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.19607843137254902, "acc_stderr": 0.03950581861179962, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179962},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.3403361344537815, "acc_stderr": 0.03077805742293167, "acc_norm": 0.3403361344537815, "acc_norm_stderr": 0.03077805742293167},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.33589743589743587, "acc_stderr": 0.02394672474156397, "acc_norm": 0.33589743589743587, "acc_norm_stderr": 0.02394672474156397},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.4351851851851852, "acc_stderr": 0.04792898170907062, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.04792898170907062},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.31527093596059114, "acc_stderr": 0.03269080871970187, "acc_norm": 0.31527093596059114, "acc_norm_stderr": 0.03269080871970187},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3258064516129032, "acc_stderr": 0.026662010578567107, "acc_norm": 0.3258064516129032, "acc_norm_stderr": 0.026662010578567107},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.5683760683760684, "acc_stderr": 0.0324483553531149, "acc_norm": 0.5683760683760684, "acc_norm_stderr": 0.0324483553531149},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.37358490566037733, "acc_stderr": 0.029773082713319878, "acc_norm": 0.37358490566037733, "acc_norm_stderr": 0.029773082713319878},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.45454545454545453, "acc_stderr": 0.04769300568972743, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.04769300568972743},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.2913907284768212, "acc_stderr": 0.037101857261199946, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.037101857261199946},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.42786069651741293, "acc_stderr": 0.03498541988407795, "acc_norm": 0.42786069651741293, "acc_norm_stderr": 0.03498541988407795},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.3236994219653179, "acc_stderr": 0.035676037996391685, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.035676037996391685},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.2222222222222222, "acc_stderr": 0.02141168439369418, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02141168439369418},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.40173410404624277, "acc_stderr": 0.026394104177643634, "acc_norm": 0.40173410404624277, "acc_norm_stderr": 0.026394104177643634},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3128834355828221, "acc_stderr": 0.03642914578292404, "acc_norm": 0.3128834355828221, "acc_norm_stderr": 0.03642914578292404},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.3425925925925926, "acc_stderr": 0.026406145973625658, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.026406145973625658},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.40932642487046633, "acc_stderr": 0.03548608168860806, "acc_norm": 0.40932642487046633, "acc_norm_stderr": 0.03548608168860806},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.24561403508771928, "acc_stderr": 0.04049339297748142, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.30642201834862387, "acc_stderr": 0.019765517220458523, "acc_norm": 0.30642201834862387, "acc_norm_stderr": 0.019765517220458523},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.4215686274509804, "acc_stderr": 0.028275490156791438, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.028275490156791438},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.5041322314049587, "acc_stderr": 0.045641987674327526, "acc_norm": 0.5041322314049587, "acc_norm_stderr": 0.045641987674327526},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.23684210526315788, "acc_stderr": 0.034597776068105365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.034597776068105365},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3300653594771242, "acc_stderr": 0.019023726160724553, "acc_norm": 0.3300653594771242, "acc_norm_stderr": 0.019023726160724553},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.2765957446808511, "acc_stderr": 0.026684564340460994, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340460994},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.3482142857142857, "acc_stderr": 0.045218299028335865, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.045218299028335865},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.24074074074074073, "acc_stderr": 0.029157522184605617, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605617},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24134078212290502, "acc_stderr": 0.014310999547961443, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961443},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.2610294117647059, "acc_stderr": 0.02667925227010312, "acc_norm": 0.2610294117647059, "acc_norm_stderr": 0.02667925227010312},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.40816326530612246, "acc_norm_stderr": 0.03146465712827424},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.39662447257383965, "acc_stderr": 0.03184399873811225, "acc_norm": 0.39662447257383965, "acc_norm_stderr": 0.03184399873811225},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.27640156453715775, "acc_stderr": 0.011422153194553567, "acc_norm": 0.27640156453715775, "acc_norm_stderr": 0.011422153194553567},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.3431372549019608, "acc_stderr": 0.033321399446680854, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.033321399446680854},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.3515151515151515, "acc_stderr": 0.037282069986826503, "acc_norm": 0.3515151515151515, "acc_norm_stderr": 0.037282069986826503},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087305, "mc2": 0.44326975161880294, "mc2_stderr": 0.015781962014868475},
-    "harness|ko_commongen_v2|2": {"acc": 0.1936619718309859, "acc_stderr": 0.013546152666107363, "acc_norm": 0.27816901408450706, "acc_norm_stderr": 0.01536057085913159}
-  },
-  "versions": {"all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_<subtask>|5": 1 for each of the 57 MMLU subtasks above, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1},
-  "config_general": {
-    "model_name": "KRAFTON/KORani-v2-13B",
-    "model_sha": "12dbb4046d3fabb3b64c3eab2ecc91faec1af9e9",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}
KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json
DELETED
@@ -1,444 +0,0 @@
-{
-  "results": {
-    "harness|ko_arc_challenge|25": {"acc": 0.3046075085324232, "acc_stderr": 0.01344952210993249, "acc_norm": 0.34726962457337884, "acc_norm_stderr": 0.013913034529620442},
-    "harness|ko_hellaswag|10": {"acc": 0.3494323839872535, "acc_stderr": 0.004758162967997396, "acc_norm": 0.4313881696873133, "acc_norm_stderr": 0.004942578520987348},
-    "harness|ko_mmlu_world_religions|5": {"acc": 0.38011695906432746, "acc_stderr": 0.03722965741385539, "acc_norm": 0.38011695906432746, "acc_norm_stderr": 0.03722965741385539},
-    "harness|ko_mmlu_management|5": {"acc": 0.39805825242718446, "acc_stderr": 0.04846748253977239, "acc_norm": 0.39805825242718446, "acc_norm_stderr": 0.04846748253977239},
-    "harness|ko_mmlu_miscellaneous|5": {"acc": 0.3665389527458493, "acc_stderr": 0.01723124462679705, "acc_norm": 0.3665389527458493, "acc_norm_stderr": 0.01723124462679705},
-    "harness|ko_mmlu_anatomy|5": {"acc": 0.2962962962962963, "acc_stderr": 0.03944624162501117, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501117},
-    "harness|ko_mmlu_abstract_algebra|5": {"acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474},
-    "harness|ko_mmlu_conceptual_physics|5": {"acc": 0.32340425531914896, "acc_stderr": 0.030579442773610334, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.030579442773610334},
-    "harness|ko_mmlu_virology|5": {"acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227},
-    "harness|ko_mmlu_philosophy|5": {"acc": 0.37942122186495175, "acc_stderr": 0.027559949802347817, "acc_norm": 0.37942122186495175, "acc_norm_stderr": 0.027559949802347817},
-    "harness|ko_mmlu_human_aging|5": {"acc": 0.3811659192825112, "acc_stderr": 0.03259625118416827, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.03259625118416827},
-    "harness|ko_mmlu_human_sexuality|5": {"acc": 0.3816793893129771, "acc_stderr": 0.0426073515764456, "acc_norm": 0.3816793893129771, "acc_norm_stderr": 0.0426073515764456},
-    "harness|ko_mmlu_medical_genetics|5": {"acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236},
-    "harness|ko_mmlu_high_school_geography|5": {"acc": 0.3282828282828283, "acc_stderr": 0.03345678422756777, "acc_norm": 0.3282828282828283, "acc_norm_stderr": 0.03345678422756777},
-    "harness|ko_mmlu_electrical_engineering|5": {"acc": 0.32413793103448274, "acc_stderr": 0.03900432069185553, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185553},
-    "harness|ko_mmlu_college_physics|5": {"acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865},
-    "harness|ko_mmlu_high_school_microeconomics|5": {"acc": 0.3865546218487395, "acc_stderr": 0.03163145807552378, "acc_norm": 0.3865546218487395, "acc_norm_stderr": 0.03163145807552378},
-    "harness|ko_mmlu_high_school_macroeconomics|5": {"acc": 0.34102564102564104, "acc_stderr": 0.024035489676335044, "acc_norm": 0.34102564102564104, "acc_norm_stderr": 0.024035489676335044},
-    "harness|ko_mmlu_computer_security|5": {"acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795},
-    "harness|ko_mmlu_global_facts|5": {"acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384739},
-    "harness|ko_mmlu_jurisprudence|5": {"acc": 0.4444444444444444, "acc_stderr": 0.04803752235190192, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04803752235190192},
-    "harness|ko_mmlu_high_school_chemistry|5": {"acc": 0.2955665024630542, "acc_stderr": 0.032104944337514575, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.032104944337514575},
-    "harness|ko_mmlu_high_school_biology|5": {"acc": 0.3870967741935484, "acc_stderr": 0.02770935967503249, "acc_norm": 0.3870967741935484, "acc_norm_stderr": 0.02770935967503249},
-    "harness|ko_mmlu_marketing|5": {"acc": 0.5811965811965812, "acc_stderr": 0.03232128912157792, "acc_norm": 0.5811965811965812, "acc_norm_stderr": 0.03232128912157792},
-    "harness|ko_mmlu_clinical_knowledge|5": {"acc": 0.33584905660377357, "acc_stderr": 0.029067220146644826, "acc_norm": 0.33584905660377357, "acc_norm_stderr": 0.029067220146644826},
-    "harness|ko_mmlu_public_relations|5": {"acc": 0.42727272727272725, "acc_stderr": 0.04738198703545483, "acc_norm": 0.42727272727272725, "acc_norm_stderr": 0.04738198703545483},
-    "harness|ko_mmlu_high_school_mathematics|5": {"acc": 0.23703703703703705, "acc_stderr": 0.02592887613276611, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.02592887613276611},
-    "harness|ko_mmlu_high_school_physics|5": {"acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733},
-    "harness|ko_mmlu_sociology|5": {"acc": 0.43781094527363185, "acc_stderr": 0.0350808011219984, "acc_norm": 0.43781094527363185, "acc_norm_stderr": 0.0350808011219984},
-    "harness|ko_mmlu_college_medicine|5": {"acc": 0.27167630057803466, "acc_stderr": 0.0339175032232166, "acc_norm": 0.27167630057803466, "acc_norm_stderr": 0.0339175032232166},
-    "harness|ko_mmlu_elementary_mathematics|5": {"acc": 0.25396825396825395, "acc_stderr": 0.022418042891113935, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113935},
-    "harness|ko_mmlu_college_biology|5": {"acc": 0.2708333333333333, "acc_stderr": 0.03716177437566017, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566017},
-    "harness|ko_mmlu_college_chemistry|5": {"acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269},
-    "harness|ko_mmlu_us_foreign_policy|5": {"acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05},
-    "harness|ko_mmlu_moral_disputes|5": {"acc": 0.3872832369942196, "acc_stderr": 0.026226158605124655, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.026226158605124655},
-    "harness|ko_mmlu_logical_fallacies|5": {"acc": 0.3987730061349693, "acc_stderr": 0.03847021420456023, "acc_norm": 0.3987730061349693, "acc_norm_stderr": 0.03847021420456023},
-    "harness|ko_mmlu_prehistory|5": {"acc": 0.38271604938271603, "acc_stderr": 0.027044538138402616, "acc_norm": 0.38271604938271603, "acc_norm_stderr": 0.027044538138402616},
-    "harness|ko_mmlu_college_mathematics|5": {"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394},
-    "harness|ko_mmlu_high_school_government_and_politics|5": {"acc": 0.41968911917098445, "acc_stderr": 0.035615873276858834, "acc_norm": 0.41968911917098445, "acc_norm_stderr": 0.035615873276858834},
-    "harness|ko_mmlu_econometrics|5": {"acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415},
-    "harness|ko_mmlu_high_school_psychology|5": {"acc": 0.3174311926605505, "acc_stderr": 0.0199571521984605, "acc_norm": 0.3174311926605505, "acc_norm_stderr": 0.0199571521984605},
-    "harness|ko_mmlu_formal_logic|5": {"acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859},
-    "harness|ko_mmlu_nutrition|5": {"acc": 0.4084967320261438, "acc_stderr": 0.028146405993096358, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.028146405993096358},
-    "harness|ko_mmlu_business_ethics|5": {"acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428},
-    "harness|ko_mmlu_international_law|5": {"acc": 0.5702479338842975, "acc_stderr": 0.04519082021319773, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.04519082021319773},
-    "harness|ko_mmlu_astronomy|5": {"acc": 0.23026315789473684, "acc_stderr": 0.03426059424403165, "acc_norm": 0.23026315789473684, "acc_norm_stderr": 0.03426059424403165},
-    "harness|ko_mmlu_professional_psychology|5": {"acc": 0.3137254901960784, "acc_stderr": 0.01877168389352817, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.01877168389352817},
-    "harness|ko_mmlu_professional_accounting|5": {"acc": 0.3049645390070922, "acc_stderr": 0.02746470844202213, "acc_norm": 0.3049645390070922, "acc_norm_stderr": 0.02746470844202213},
-    "harness|ko_mmlu_machine_learning|5": {"acc": 0.36607142857142855, "acc_stderr": 0.04572372358737431, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.04572372358737431},
-    "harness|ko_mmlu_high_school_statistics|5": {"acc": 0.24074074074074073, "acc_stderr": 0.029157522184605607, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.029157522184605607},
-    "harness|ko_mmlu_moral_scenarios|5": {"acc": 0.24916201117318434, "acc_stderr": 0.014465893829859933, "acc_norm": 0.24916201117318434, "acc_norm_stderr": 0.014465893829859933},
-    "harness|ko_mmlu_college_computer_science|5": {"acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814},
-    "harness|ko_mmlu_high_school_computer_science|5": {"acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218},
-    "harness|ko_mmlu_professional_medicine|5": {"acc": 0.21691176470588236, "acc_stderr": 0.025035845227711254, "acc_norm": 0.21691176470588236, "acc_norm_stderr": 0.025035845227711254},
-    "harness|ko_mmlu_security_studies|5": {"acc": 0.4163265306122449, "acc_stderr": 0.03155782816556164, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.03155782816556164},
-    "harness|ko_mmlu_high_school_world_history|5": {"acc": 0.3924050632911392, "acc_stderr": 0.03178471874564729, "acc_norm": 0.3924050632911392, "acc_norm_stderr": 0.03178471874564729},
-    "harness|ko_mmlu_professional_law|5": {"acc": 0.3050847457627119, "acc_stderr": 0.011759939618085451, "acc_norm": 0.3050847457627119, "acc_norm_stderr": 0.011759939618085451},
-    "harness|ko_mmlu_high_school_us_history|5": {"acc": 0.3627450980392157, "acc_stderr": 0.03374499356319355, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.03374499356319355},
-    "harness|ko_mmlu_high_school_european_history|5": {"acc": 0.4, "acc_stderr": 0.03825460278380026, "acc_norm": 0.4, "acc_norm_stderr": 0.03825460278380026},
-    "harness|ko_truthfulqa_mc|0": {"mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834562, "mc2": 0.44032476462099357, "mc2_stderr": 0.015871156864559203},
-    "harness|ko_commongen_v2|2": {"acc": 0.19014084507042253, "acc_stderr": 0.01345171563310588, "acc_norm": 0.2664319248826291, "acc_norm_stderr": 0.01515474253336583}
-  },
-  "versions": {"all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_<subtask>|5": 1 for each of the 57 MMLU subtasks above, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1},
-  "config_general": {
-    "model_name": "KRAFTON/KORani-v3-13B",
-    "model_sha": "d6479f9de126caf02a770e5e8db4524a0ccb4db7",
-    "model_dtype": "torch.float16",
-    "lighteval_sha": "",
-    "num_few_shot_default": 0,
-    "num_fewshot_seeds": 1,
-    "override_batch_size": 1,
-    "max_samples": null
-  }
-}