lvkaokao committed
Commit • 7be779d
1 Parent(s): 81c28b8

sync results.
- EleutherAI/results_2024-05-19-04-30-22_gpt-j-6b.json +576 -0
- Intel/phi-2-int4-inc/results_2024-05-29-00-22-57.json +3 -3
- Intel/phi-2-int4-inc/results_2024-05-31-20-22-58.json +597 -0
- Intel/results_2024-05-29-11-08-21.json +595 -0
- Intel/results_2024-05-29-17-28-38.json +595 -0
- Intel/results_2024-05-29-18-26-24.json +596 -0
- Intel/results_2024-05-31-22-34-27.json +597 -0
- QuantFactory/results_2024-05-25-03-11-56.json +579 -0
- Qwen/results_2024-05-19-02-59-07_Qwen1.5-7B-Chat.json +576 -0
- Qwen/results_2024-05-19-03-23-26_Qwen1.5-0.5B-Chat.json +576 -0
- baichuan-inc/results_2024-05-19-01-02-50_Baichuan2-13B-Chat.json +576 -0
- baichuan-inc/results_2024-05-19-01-56-52_Baichuan2-7B-Chat.json +576 -0
- bigscience/results_2024-05-19-06-01-34_bloom-7b1.json +576 -0
- facebook/results_2024-05-19-13-35-55_opt-13b.json +576 -0
- facebook/results_2024-05-19-14-00-10_opt-1.3b.json +576 -0
- google/results_2024-05-18-14-39-54_gemma-7b.json +576 -0
- google/results_2024-05-18-15-11-33_gemma-7b-it.json +576 -0
- lodrick-the-lafted/results_2024-05-18-13-49-24_Olethros-8B.json +576 -0
- meta-llama/results_2024-05-18-11-57-40_llama3_8b_instruct-chat.json +576 -0
- meta-llama/results_2024-05-21-09-49-00_Llama-2-7b-chat-hf.json +576 -0
- microsoft/results_2024-05-17-23-28-23_Phi-3-mini-4k-instruct.json +576 -0
- microsoft/results_2024-05-18-20-01-59_phi-2.json +576 -0
- microsoft/results_2024-05-19-05-12-46_Phi-3-mini-128k-instruct.json +576 -0
- mistralai/results_2024-05-18-11-15-11_Mistral-7B-Instruct-v0.2.json +576 -0
- tiiuae/results_2024-05-19-06-56-28_falcon-7b.json +576 -0
- upstage/results_2024-05-17-22-35-35_SOLAR-10.7B-Instruct-v1.0.json +576 -0
EleutherAI/results_2024-05-19-04-30-22_gpt-j-6b.json
ADDED
@@ -0,0 +1,576 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": "-1",
+    "start_time": "",
+    "end_time": "",
+    "total_evaluation_time_secondes": "",
+    "model_name": "EleutherAI/gpt-j-6b",
+    "model_sha": "",
+    "model_dtype": "32bit",
+    "model_size": 24.0,
+    "model_params": 6.0,
+    "quant_type": null,
+    "precision": "32bit"
+  },
+  "results": {
+    "harness|hellaswag|0": {
+      "acc,none": 0.4954192391953794,
+      "acc_stderr,none": 0.004989572002196689,
+      "acc_norm,none": 0.6624178450507867,
+      "acc_norm_stderr,none": 0.004719187890948097,
+      "alias": "hellaswag"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.20195838433292534,
+      "acc_stderr,none": 0.01405395744151236,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.288,
+      "acc_stderr,none": 0.02027150383507522,
+      "acc_norm,none": 0.382,
+      "acc_norm_stderr,none": 0.021750820591250834,
+      "alias": "openbookqa"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 4.102596264046853,
+      "perplexity_stderr,none": 0.08851218184790374,
+      "acc,none": 0.6827091014942752,
+      "acc_stderr,none": 0.006484234706911054,
+      "alias": "lambada_openai"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.7535364526659413,
+      "acc_stderr,none": 0.01005481078967182,
+      "acc_norm,none": 0.7616974972796517,
+      "acc_norm_stderr,none": 0.009940334245876209,
+      "alias": "piqa"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.2593647628542943,
+      "acc_stderr,none": 0.0036927373905284726,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.25270988310308184,
+      "acc_stderr,none": 0.006332069946024035
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.25396825396825395,
+      "acc_stderr,none": 0.03893259610604674
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.21212121212121213,
+      "acc_stderr,none": 0.03192271569548299
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.24019607843137256,
+      "acc_stderr,none": 0.02998373305591361
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.28270042194092826,
+      "acc_stderr,none": 0.029312814153955927
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.256198347107438,
+      "acc_stderr,none": 0.039849796533028725
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.25925925925925924,
+      "acc_stderr,none": 0.04236511258094632
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.19631901840490798,
+      "acc_stderr,none": 0.031207970394709218
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.28901734104046245,
+      "acc_stderr,none": 0.02440517393578323
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.2424581005586592,
+      "acc_stderr,none": 0.014333522059217887
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.22186495176848875,
+      "acc_stderr,none": 0.023598858292863047
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.3148148148148148,
+      "acc_stderr,none": 0.025842248700902168
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.24641460234680573,
+      "acc_stderr,none": 0.011005971399927232
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.28654970760233917,
+      "acc_stderr,none": 0.034678266857038266
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.2777598970067589,
+      "acc_stderr,none": 0.00800529472828752
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720684
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.23773584905660378,
+      "acc_stderr,none": 0.026199808807561932
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.2254335260115607,
+      "acc_stderr,none": 0.03186209851641144
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.31,
+      "acc_stderr,none": 0.04648231987117316
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.3991031390134529,
+      "acc_stderr,none": 0.03286745312567961
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.22330097087378642,
+      "acc_stderr,none": 0.04123553189891431
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.2692307692307692,
+      "acc_stderr,none": 0.02905858830374884
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.047258156262526045
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.2720306513409962,
+      "acc_stderr,none": 0.015913367447500503
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.3104575163398693,
+      "acc_stderr,none": 0.0264930332251459
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.2765957446808511,
+      "acc_stderr,none": 0.026684564340460994
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.19852941176470587,
+      "acc_stderr,none": 0.024231013370541093
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.3192771084337349,
+      "acc_stderr,none": 0.036293353299478595
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.25641858953526164,
+      "acc_stderr,none": 0.007862223264863579
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.18421052631578946,
+      "acc_stderr,none": 0.03646758875075566
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.18181818181818182,
+      "acc_stderr,none": 0.0274796030105388
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.27461139896373055,
+      "acc_stderr,none": 0.03221024508041154
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.2692307692307692,
+      "acc_stderr,none": 0.022489389793654824
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.27310924369747897,
+      "acc_stderr,none": 0.02894200404099817
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.24036697247706423,
+      "acc_stderr,none": 0.01832060732096407
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.29770992366412213,
+      "acc_stderr,none": 0.040103589424622034
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.24509803921568626,
+      "acc_stderr,none": 0.017401816711427657
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.3090909090909091,
+      "acc_stderr,none": 0.044262946482000985
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.2938775510204082,
+      "acc_stderr,none": 0.029162738410249772
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.24378109452736318,
+      "acc_stderr,none": 0.030360490154014645
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.34,
+      "acc_stderr,none": 0.04760952285695236
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.25404376784015226,
+      "acc_stderr,none": 0.007751242884532469
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720683
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.24444444444444444,
+      "acc_stderr,none": 0.03712537833614866
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.2631578947368421,
+      "acc_stderr,none": 0.03583496176361061
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.2152777777777778,
+      "acc_stderr,none": 0.03437079344106133
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.26,
+      "acc_stderr,none": 0.0440844002276808
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.23,
+      "acc_stderr,none": 0.04229525846816506
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.28,
+      "acc_stderr,none": 0.045126085985421276
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.20588235294117646,
+      "acc_stderr,none": 0.04023382273617746
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720683
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.31063829787234043,
+      "acc_stderr,none": 0.03025123757921317
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.3103448275862069,
+      "acc_stderr,none": 0.03855289616378947
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.24867724867724866,
+      "acc_stderr,none": 0.02226181769240016
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.23225806451612904,
+      "acc_stderr,none": 0.024022256130308235
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.270935960591133,
+      "acc_stderr,none": 0.031270907132976984
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.23,
+      "acc_stderr,none": 0.04229525846816506
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.24444444444444444,
+      "acc_stderr,none": 0.026202766534652148
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.23841059602649006,
+      "acc_stderr,none": 0.03479185572599661
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.18981481481481483,
+      "acc_stderr,none": 0.026744714834691916
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.32142857142857145,
+      "acc_stderr,none": 0.04432804055291519
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.359663881399798,
+      "acc_stderr,none": 0.013461833560008199,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.67003367003367,
+      "acc_stderr,none": 0.009648311574241042,
+      "acc_norm,none": 0.6224747474747475,
+      "acc_norm_stderr,none": 0.009947227833469432,
+      "alias": "arc_easy"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.6408839779005525,
+      "acc_stderr,none": 0.013483115202120236,
+      "alias": "winogrande"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.655045871559633,
+      "acc_stderr,none": 0.008313981812572256,
+      "alias": "boolq"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.3387372013651877,
+      "acc_stderr,none": 0.013830568927974332,
+      "acc_norm,none": 0.363481228668942,
+      "acc_norm_stderr,none": 0.014056207319068282,
+      "alias": "arc_challenge"
+    }
+  },
+  "task_info": {
+    "model": "EleutherAI/gpt-j-6b",
+    "revision": "main",
+    "private": false,
+    "params": 6.0,
+    "architectures": "GPTJForCausalLM",
+    "quant_type": null,
+    "precision": "32bit",
+    "model_params": 6.0,
+    "model_size": 24.0,
+    "weight_dtype": "float32",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-04-27T08:04:58Z",
+    "model_type": "original",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": null,
+  "versions": {
+    "harness|hellaswag|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|piqa|0": 1.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|winogrande|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|arc:challenge|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1716060228.1085954,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=EleutherAI/gpt-j-6b,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 4,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
Intel/phi-2-int4-inc/results_2024-05-29-00-22-57.json
CHANGED
@@ -14,7 +14,7 @@
     "model_dtype": "4bit",
     "model_size": 1.84,
     "model_params": 2.54,
-    "quant_type": "
+    "quant_type": "AutoRound",
     "precision": "4bit"
   },
   "results": {
@@ -397,7 +397,7 @@
     "private": false,
     "params": 1.84,
     "architectures": "PhiForCausalLM",
-    "quant_type": "
+    "quant_type": "AutoRound",
     "precision": "4bit",
     "model_params": 2.54,
     "model_size": 1.84,
@@ -592,4 +592,4 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   }
-}
+}
Intel/phi-2-int4-inc/results_2024-05-31-20-22-58.json
ADDED
@@ -0,0 +1,597 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-31-20-22-58",
+    "total_evaluation_time_secondes": "",
+    "model_name": "Intel/phi-2-int4-inc",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 1.84,
+    "model_params": 2.54,
+    "quant_type": "AutoRound",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|mmlu|0": {
+      "acc,none": 0.5269904571998291,
+      "acc_stderr,none": 0.004027192999056069,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.48374070138150904,
+      "acc_stderr,none": 0.0068766898488879106
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.3412698412698413,
+      "acc_stderr,none": 0.04240799327574924
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.6606060606060606,
+      "acc_stderr,none": 0.03697442205031596
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.6764705882352942,
+      "acc_stderr,none": 0.032834720561085606
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.7172995780590717,
+      "acc_stderr,none": 0.02931281415395592
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.7603305785123967,
+      "acc_stderr,none": 0.03896878985070417
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.6481481481481481,
+      "acc_stderr,none": 0.046166311118017146
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.7116564417177914,
+      "acc_stderr,none": 0.03559039531617342
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.6213872832369942,
+      "acc_stderr,none": 0.02611374936131034
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.23798882681564246,
+      "acc_stderr,none": 0.014242630070574885
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.5562700964630225,
+      "acc_stderr,none": 0.028217683556652308
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.5895061728395061,
+      "acc_stderr,none": 0.027371350925124768
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.408735332464146,
+      "acc_stderr,none": 0.012555701346703384
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.695906432748538,
+      "acc_stderr,none": 0.0352821125824523
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.5780495654972643,
+      "acc_stderr,none": 0.008617183067560772
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.54,
+      "acc_stderr,none": 0.05009082659620332
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.6113207547169811,
+      "acc_stderr,none": 0.030000485448675986
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.5375722543352601,
+      "acc_stderr,none": 0.0380168510452446
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.34,
+      "acc_stderr,none": 0.04760952285695235
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.600896860986547,
+      "acc_stderr,none": 0.032867453125679603
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.6893203883495146,
+      "acc_stderr,none": 0.04582124160161549
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.7991452991452992,
+      "acc_stderr,none": 0.026246772946890477
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.61,
+      "acc_stderr,none": 0.04902071300001975
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.6590038314176245,
+      "acc_stderr,none": 0.01695178138322331
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.6013071895424836,
+      "acc_stderr,none": 0.02803609227389176
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.38652482269503546,
+      "acc_stderr,none": 0.029049190342543465
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.41911764705882354,
+      "acc_stderr,none": 0.029972807170464626
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.463855421686747,
+      "acc_stderr,none": 0.038823108508905954
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.617809554761131,
+      "acc_stderr,none": 0.008533616608499968
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.2719298245614035,
+      "acc_stderr,none": 0.04185774424022057
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.6666666666666666,
+      "acc_stderr,none": 0.03358618145732523
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.7357512953367875,
+      "acc_stderr,none": 0.03182155050916646
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.541025641025641,
+      "acc_stderr,none": 0.025265525491284295
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.5252100840336135,
+      "acc_stderr,none": 0.032437180551374095
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.7357798165137615,
+      "acc_stderr,none": 0.018904164171510182
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.6106870229007634,
+      "acc_stderr,none": 0.04276486542814591
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.5228758169934641,
+      "acc_stderr,none": 0.02020665318788479
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.6545454545454545,
+      "acc_stderr,none": 0.04554619617541054
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.6653061224489796,
+      "acc_stderr,none": 0.030209235226242304
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.746268656716418,
+      "acc_stderr,none": 0.03076944496729602
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.74,
+      "acc_stderr,none": 0.0440844002276808
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.4525848398350777,
+      "acc_stderr,none": 0.008655273536743203
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720683
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.4444444444444444,
+      "acc_stderr,none": 0.04292596718256981
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.5526315789473685,
+      "acc_stderr,none": 0.040463368839782514
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.5277777777777778,
+      "acc_stderr,none": 0.04174752578923185
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.32,
+      "acc_stderr,none": 0.04688261722621503
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.4,
+      "acc_stderr,none": 0.049236596391733084
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.42,
+      "acc_stderr,none": 0.04960449637488584
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.23529411764705882,
+      "acc_stderr,none": 0.042207736591714534
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.61,
+      "acc_stderr,none": 0.04902071300001975
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.46382978723404256,
+      "acc_stderr,none": 0.032600385118357715
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.496551724137931,
+      "acc_stderr,none": 0.041665675771015785
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.3994708994708995,
+      "acc_stderr,none": 0.025225450284067877
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.6741935483870968,
+      "acc_stderr,none": 0.0266620105785671
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.43349753694581283,
+      "acc_stderr,none": 0.034867317274198714
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.62,
+      "acc_stderr,none": 0.048783173121456316
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.3037037037037037,
+      "acc_stderr,none": 0.028037929969114993
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.3708609271523179,
+      "acc_stderr,none": 0.03943966699183629
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.4583333333333333,
+      "acc_stderr,none": 0.033981108902946366
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.45535714285714285,
+      "acc_stderr,none": 0.04726835553719099
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.8000841750841751,
+      "acc_stderr,none": 0.008206531105458865,
+      "acc_norm,none": 0.7798821548821548,
+      "acc_norm_stderr,none": 0.008501788774716775,
+      "alias": "arc_easy"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.4455337824401393,
+      "acc_stderr,none": 0.015029648455608844,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.8351681957186544,
+      "acc_stderr,none": 0.006489332389894504,
+      "alias": "boolq"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.5136518771331058,
+      "acc_stderr,none": 0.014605943429860945,
+      "acc_norm,none": 0.5426621160409556,
+      "acc_norm_stderr,none": 0.014558106543924065,
+      "alias": "arc_challenge"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.7584846093133386,
+      "acc_stderr,none": 0.012028983782011868,
+      "alias": "winogrande"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 5.8546659684448885,
+      "perplexity_stderr,none": 0.16138014524913216,
+      "acc,none": 0.6163399961187658,
+      "acc_stderr,none": 0.0067747849158962875,
+      "alias": "lambada_openai"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.7856365614798694,
+      "acc_stderr,none": 0.009574842136050947,
+      "acc_norm,none": 0.7932535364526659,
+      "acc_norm_stderr,none": 0.009448665514183273,
+      "alias": "piqa"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.406,
+      "acc_stderr,none": 0.021983962090086337,
+      "acc_norm,none": 0.506,
+      "acc_norm_stderr,none": 0.022381462412439324,
+      "alias": "openbookqa"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5493925512846046,
+      "acc_stderr,none": 0.004965375341643114,
+      "acc_norm,none": 0.726548496315475,
+      "acc_norm_stderr,none": 0.00444819664838301,
+      "alias": "hellaswag"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.2998776009791922,
+      "acc_stderr,none": 0.01604035296671364,
+      "alias": "truthfulqa_mc1"
+    }
+  },
+  "task_info": {
+    "model": "Intel/phi-2-int4-inc",
+    "revision": "main",
+    "private": false,
+    "params": 1.84,
+    "architectures": "PhiForCausalLM",
+    "quant_type": "AutoRound",
+    "precision": "4bit",
+    "model_params": 2.54,
+    "model_size": 1.84,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Finished",
+    "submitted_time": "2024-05-28T15:43:10Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "amp": true,
+    "autoround_version": "0.2.0.dev",
+    "backend": "autoround:exllamav2",
+    "bits": 4,
+    "data_type": "int",
+    "dataset": "NeelNanda/pile-10k",
+    "enable_minmax_tuning": true,
+    "enable_quanted_input": false,
+    "gradient_accumulate_steps": 1,
+    "group_size": 128,
+    "iters": 200,
+    "low_gpu_mem_usage": false,
+    "lr": 0.005,
+    "minmax_lr": 0.005,
+    "n_samples": 512,
+    "quant_method": "intel/auto-round",
+    "scale_dtype": "torch.float16",
+    "seqlen": 2048,
+    "sym": false,
+    "train_bs": 8
+  },
+  "versions": {
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|boolq|0": 2.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|winogrande|0": 1.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|piqa|0": 1.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1717156363.3451269,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=Intel/phi-2-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 4,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
Intel/results_2024-05-29-11-08-21.json
ADDED
@@ -0,0 +1,595 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null,
+    "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-29-11-08-21",
+    "total_evaluation_time_secondes": "", "model_name": "Intel/gemma-2b-int4-inc", "model_sha": "",
+    "model_dtype": "4bit", "model_size": 3.13, "model_params": 2.0, "quant_type": "AutoRound", "precision": "4bit"
+  },
+  "results": {
+    "harness|openbookqa|0": {"acc,none": 0.31, "acc_stderr,none": 0.0207040410217248, "acc_norm,none": 0.402, "acc_norm_stderr,none": 0.021948929609938612, "alias": "openbookqa"},
+    "harness|piqa|0": {"acc,none": 0.7622415669205659, "acc_stderr,none": 0.009932525779525487, "acc_norm,none": 0.7818280739934712, "acc_norm_stderr,none": 0.009636081958374383, "alias": "piqa"},
+    "harness|lambada:openai|0": {"perplexity,none": 5.459819719176573, "perplexity_stderr,none": 0.130568495808606, "acc,none": 0.6409858334950514, "acc_stderr,none": 0.006683317316135514, "alias": "lambada_openai"},
+    "harness|truthfulqa:mc2|0": {"acc,none": 0.33564684519977467, "acc_stderr,none": 0.013451541740241494, "alias": "truthfulqa_mc2"},
+    "harness|hellaswag|0": {"acc,none": 0.5184226249751046, "acc_stderr,none": 0.004986393266269148, "acc_norm,none": 0.7029476199960167, "acc_norm_stderr,none": 0.004560259083197374, "alias": "hellaswag"},
+    "harness|truthfulqa:mc1|0": {"acc,none": 0.2215422276621787, "acc_stderr,none": 0.01453786760130114, "alias": "truthfulqa_mc1"},
+    "harness|arc:challenge|0": {"acc,none": 0.386518771331058, "acc_stderr,none": 0.014230084761910478, "acc_norm,none": 0.40273037542662116, "acc_norm_stderr,none": 0.014332236306790144, "alias": "arc_challenge"},
+    "harness|winogrande|0": {"acc,none": 0.6550907655880032, "acc_stderr,none": 0.01335937980503369, "alias": "winogrande"},
+    "harness|boolq|0": {"acc,none": 0.6948012232415902, "acc_stderr,none": 0.00805404814192796, "alias": "boolq"},
+    "harness|mmlu|0": {"acc,none": 0.3305084745762712, "acc_stderr,none": 0.0039454910882845634, "alias": "mmlu"},
+    "harness|mmlu_humanities|0": {"alias": " - humanities", "acc,none": 0.3141339001062699, "acc_stderr,none": 0.006717003169884759},
+    "harness|mmlu_formal_logic|0": {"alias": " - formal_logic", "acc,none": 0.30952380952380953, "acc_stderr,none": 0.04134913018303316},
+    "harness|mmlu_high_school_european_history|0": {"alias": " - high_school_european_history", "acc,none": 0.3878787878787879, "acc_stderr,none": 0.0380491365397101},
+    "harness|mmlu_high_school_us_history|0": {"alias": " - high_school_us_history", "acc,none": 0.3137254901960784, "acc_stderr,none": 0.032566854844603886},
+    "harness|mmlu_high_school_world_history|0": {"alias": " - high_school_world_history", "acc,none": 0.3291139240506329, "acc_stderr,none": 0.03058732629470236},
+    "harness|mmlu_international_law|0": {"alias": " - international_law", "acc,none": 0.5206611570247934, "acc_stderr,none": 0.04560456086387235},
+    "harness|mmlu_jurisprudence|0": {"alias": " - jurisprudence", "acc,none": 0.35185185185185186, "acc_stderr,none": 0.046166311118017125},
+    "harness|mmlu_logical_fallacies|0": {"alias": " - logical_fallacies", "acc,none": 0.3128834355828221, "acc_stderr,none": 0.036429145782924055},
+    "harness|mmlu_moral_disputes|0": {"alias": " - moral_disputes", "acc,none": 0.32947976878612717, "acc_stderr,none": 0.02530525813187972},
+    "harness|mmlu_moral_scenarios|0": {"alias": " - moral_scenarios", "acc,none": 0.2424581005586592, "acc_stderr,none": 0.014333522059217892},
+    "harness|mmlu_philosophy|0": {"alias": " - philosophy", "acc,none": 0.3408360128617363, "acc_stderr,none": 0.02692084126077616},
+    "harness|mmlu_prehistory|0": {"alias": " - prehistory", "acc,none": 0.37962962962962965, "acc_stderr,none": 0.027002521034516468},
+    "harness|mmlu_professional_law|0": {"alias": " - professional_law", "acc,none": 0.28748370273794005, "acc_stderr,none": 0.0115593373557085},
+    "harness|mmlu_world_religions|0": {"alias": " - world_religions", "acc,none": 0.4678362573099415, "acc_stderr,none": 0.03826882417660369},
+    "harness|mmlu_other|0": {"alias": " - other", "acc,none": 0.3678789829417444, "acc_stderr,none": 0.008609954637109535},
+    "harness|mmlu_business_ethics|0": {"alias": " - business_ethics", "acc,none": 0.36, "acc_stderr,none": 0.04824181513244218},
+    "harness|mmlu_clinical_knowledge|0": {"alias": " - clinical_knowledge", "acc,none": 0.39245283018867927, "acc_stderr,none": 0.03005258057955784},
+    "harness|mmlu_college_medicine|0": {"alias": " - college_medicine", "acc,none": 0.3583815028901734, "acc_stderr,none": 0.036563436533531585},
+    "harness|mmlu_global_facts|0": {"alias": " - global_facts", "acc,none": 0.19, "acc_stderr,none": 0.03942772444036622},
+    "harness|mmlu_human_aging|0": {"alias": " - human_aging", "acc,none": 0.3542600896860987, "acc_stderr,none": 0.032100621541349864},
+    "harness|mmlu_management|0": {"alias": " - management", "acc,none": 0.3592233009708738, "acc_stderr,none": 0.04750458399041694},
+    "harness|mmlu_marketing|0": {"alias": " - marketing", "acc,none": 0.4658119658119658, "acc_stderr,none": 0.03267942734081227},
+    "harness|mmlu_medical_genetics|0": {"alias": " - medical_genetics", "acc,none": 0.38, "acc_stderr,none": 0.048783173121456316},
+    "harness|mmlu_miscellaneous|0": {"alias": " - miscellaneous", "acc,none": 0.40357598978288634, "acc_stderr,none": 0.017544332237926417},
+    "harness|mmlu_nutrition|0": {"alias": " - nutrition", "acc,none": 0.3888888888888889, "acc_stderr,none": 0.027914055510468008},
+    "harness|mmlu_professional_accounting|0": {"alias": " - professional_accounting", "acc,none": 0.32269503546099293, "acc_stderr,none": 0.027889139300534792},
+    "harness|mmlu_professional_medicine|0": {"alias": " - professional_medicine", "acc,none": 0.27205882352941174, "acc_stderr,none": 0.027033041151681453},
+    "harness|mmlu_virology|0": {"alias": " - virology", "acc,none": 0.35542168674698793, "acc_stderr,none": 0.03726214354322415},
+    "harness|mmlu_social_sciences|0": {"alias": " - social_sciences", "acc,none": 0.3431914202144946, "acc_stderr,none": 0.008551004819937463},
+    "harness|mmlu_econometrics|0": {"alias": " - econometrics", "acc,none": 0.2631578947368421, "acc_stderr,none": 0.04142439719489359},
+    "harness|mmlu_high_school_geography|0": {"alias": " - high_school_geography", "acc,none": 0.3181818181818182, "acc_stderr,none": 0.0331847733384533},
+    "harness|mmlu_high_school_government_and_politics|0": {"alias": " - high_school_government_and_politics", "acc,none": 0.34196891191709844, "acc_stderr,none": 0.034234651001042844},
+    "harness|mmlu_high_school_macroeconomics|0": {"alias": " - high_school_macroeconomics", "acc,none": 0.30256410256410254, "acc_stderr,none": 0.02329088805377273},
+    "harness|mmlu_high_school_microeconomics|0": {"alias": " - high_school_microeconomics", "acc,none": 0.28991596638655465, "acc_stderr,none": 0.029472485833136094},
+    "harness|mmlu_high_school_psychology|0": {"alias": " - high_school_psychology", "acc,none": 0.3779816513761468, "acc_stderr,none": 0.02078918706672811},
+    "harness|mmlu_human_sexuality|0": {"alias": " - human_sexuality", "acc,none": 0.3893129770992366, "acc_stderr,none": 0.0427648654281459},
+    "harness|mmlu_professional_psychology|0": {"alias": " - professional_psychology", "acc,none": 0.35784313725490197, "acc_stderr,none": 0.01939305840235544},
+    "harness|mmlu_public_relations|0": {"alias": " - public_relations", "acc,none": 0.3090909090909091, "acc_stderr,none": 0.044262946482000985},
+    "harness|mmlu_security_studies|0": {"alias": " - security_studies", "acc,none": 0.34285714285714286, "acc_stderr,none": 0.030387262919547735},
+    "harness|mmlu_sociology|0": {"alias": " - sociology", "acc,none": 0.373134328358209, "acc_stderr,none": 0.03419832608176007},
+    "harness|mmlu_us_foreign_policy|0": {"alias": " - us_foreign_policy", "acc,none": 0.41, "acc_stderr,none": 0.04943110704237102},
+    "harness|mmlu_stem|0": {"alias": " - stem", "acc,none": 0.30574056454170634, "acc_stderr,none": 0.008164924344786467},
+    "harness|mmlu_abstract_algebra|0": {"alias": " - abstract_algebra", "acc,none": 0.18, "acc_stderr,none": 0.038612291966536955},
+    "harness|mmlu_anatomy|0": {"alias": " - anatomy", "acc,none": 0.37037037037037035, "acc_stderr,none": 0.041716541613545426},
+    "harness|mmlu_astronomy|0": {"alias": " - astronomy", "acc,none": 0.34210526315789475, "acc_stderr,none": 0.03860731599316092},
+    "harness|mmlu_college_biology|0": {"alias": " - college_biology", "acc,none": 0.3055555555555556, "acc_stderr,none": 0.03852084696008534},
+    "harness|mmlu_college_chemistry|0": {"alias": " - college_chemistry", "acc,none": 0.28, "acc_stderr,none": 0.04512608598542128},
+    "harness|mmlu_college_computer_science|0": {"alias": " - college_computer_science", "acc,none": 0.27, "acc_stderr,none": 0.04461960433384741},
+    "harness|mmlu_college_mathematics|0": {"alias": " - college_mathematics", "acc,none": 0.27, "acc_stderr,none": 0.0446196043338474},
+    "harness|mmlu_college_physics|0": {"alias": " - college_physics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.04690650298201943},
+    "harness|mmlu_computer_security|0": {"alias": " - computer_security", "acc,none": 0.46, "acc_stderr,none": 0.05009082659620332},
+    "harness|mmlu_conceptual_physics|0": {"alias": " - conceptual_physics", "acc,none": 0.35319148936170214, "acc_stderr,none": 0.031245325202761926},
+    "harness|mmlu_electrical_engineering|0": {"alias": " - electrical_engineering", "acc,none": 0.3931034482758621, "acc_stderr,none": 0.040703290137070705},
+    "harness|mmlu_elementary_mathematics|0": {"alias": " - elementary_mathematics", "acc,none": 0.26455026455026454, "acc_stderr,none": 0.02271746789770861},
+    "harness|mmlu_high_school_biology|0": {"alias": " - high_school_biology", "acc,none": 0.3419354838709677, "acc_stderr,none": 0.02698528957655274},
+    "harness|mmlu_high_school_chemistry|0": {"alias": " - high_school_chemistry", "acc,none": 0.270935960591133, "acc_stderr,none": 0.031270907132976984},
+    "harness|mmlu_high_school_computer_science|0": {"alias": " - high_school_computer_science", "acc,none": 0.32, "acc_stderr,none": 0.046882617226215034},
+    "harness|mmlu_high_school_mathematics|0": {"alias": " - high_school_mathematics", "acc,none": 0.26296296296296295, "acc_stderr,none": 0.026842057873833706},
+    "harness|mmlu_high_school_physics|0": {"alias": " - high_school_physics", "acc,none": 0.2582781456953642, "acc_stderr,none": 0.035737053147634576},
+    "harness|mmlu_high_school_statistics|0": {"alias": " - high_school_statistics", "acc,none": 0.24074074074074073, "acc_stderr,none": 0.029157522184605603},
+    "harness|mmlu_machine_learning|0": {"alias": " - machine_learning", "acc,none": 0.38392857142857145, "acc_stderr,none": 0.04616143075028547},
+    "harness|arc:easy|0": {"acc,none": 0.7361111111111112, "acc_stderr,none": 0.009043789220055129, "acc_norm,none": 0.7112794612794613, "acc_norm_stderr,none": 0.009298805565435508, "alias": "arc_easy"}
+  },
+  "task_info": {
+    "model": "Intel/gemma-2b-int4-inc", "revision": "main", "private": false, "params": 3.13,
+    "architectures": "GemmaForCausalLM", "quant_type": "AutoRound", "precision": "4bit",
+    "model_params": 2.0, "model_size": 3.13, "weight_dtype": "int4", "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Finished",
+    "submitted_time": "2024-05-28T15:48:10Z", "model_type": "quantization", "job_id": -1,
+    "job_start_time": null, "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "autoround_version": "0.2.0.dev", "bits": 4, "damp_percent": 0.01, "desc_act": false,
+    "enable_minmax_tuning": true, "enable_quanted_input": true, "group_size": 128,
+    "is_marlin_format": false, "iters": 400, "lr": 0.0025, "minmax_lr": 0.0025,
+    "model_file_base_name": "model", "model_name_or_path": null, "quant_method": "gptq",
+    "scale_dtype": "float16", "static_groups": false, "sym": false, "true_sequential": false
+  },
+  "versions": {
+    "harness|openbookqa|0": 1.0, "harness|piqa|0": 1.0, "harness|lambada:openai|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0, "harness|hellaswag|0": 1.0, "harness|truthfulqa:mc1|0": 2.0,
+    "harness|arc:challenge|0": 1.0, "harness|winogrande|0": 1.0, "harness|boolq|0": 2.0,
+    "harness|mmlu|0": null, "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0,
+    "harness|arc:easy|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0,
+    "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0,
+    "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0,
+    "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0,
+    "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0, "winogrande": 0
+  },
+  "date": 1716949585.8120224,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=Intel/gemma-2b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null,
+    "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null
+  }
+}
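Each of these results files follows the same layout: run metadata under "config_general", per-task scores under "results", and the quantization recipe under "quantization_config". Below is a minimal sketch of pulling the headline accuracies back out of one file, assuming only the layout shown above; the file path is a placeholder, not a path fixed by this commit.

```python
import json

# Placeholder path; point this at any of the results_*.json files in this commit.
with open("results.json") as f:
    report = json.load(f)

model = report["config_general"]["model_name"]  # e.g. "Intel/gemma-2b-int4-inc"
for task, metrics in report["results"].items():
    # Score keys follow lm-eval's "<metric>,<filter>" convention, e.g. "acc,none".
    if "acc,none" in metrics:
        print(f"{model}  {task}: acc={metrics['acc,none']:.4f}")
```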
Intel/results_2024-05-29-17-28-38.json
ADDED
@@ -0,0 +1,595 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null,
+    "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-29-17-28-38",
+    "total_evaluation_time_secondes": "", "model_name": "Intel/falcon-7b-int4-inc", "model_sha": "",
+    "model_dtype": "4bit", "model_size": 4.76, "model_params": 6.74, "quant_type": "AutoRound", "precision": "4bit"
+  },
+  "results": {
+    "harness|truthfulqa:mc2|0": {"acc,none": 0.3437610262249931, "acc_stderr,none": 0.013342089877488627, "alias": "truthfulqa_mc2"},
+    "harness|arc:challenge|0": {"acc,none": 0.3924914675767918, "acc_stderr,none": 0.014269634635670693, "acc_norm,none": 0.4206484641638225, "acc_norm_stderr,none": 0.014426211252508403, "alias": "arc_challenge"},
+    "harness|mmlu|0": {"acc,none": 0.25672981056829514, "acc_stderr,none": 0.0036823783683945254, "alias": "mmlu"},
+    "harness|mmlu_humanities|0": {"alias": " - humanities", "acc,none": 0.27013815090329435, "acc_stderr,none": 0.006469786339743364},
+    "harness|mmlu_formal_logic|0": {"alias": " - formal_logic", "acc,none": 0.2857142857142857, "acc_stderr,none": 0.04040610178208841},
+    "harness|mmlu_high_school_european_history|0": {"alias": " - high_school_european_history", "acc,none": 0.23636363636363636, "acc_stderr,none": 0.033175059300091805},
+    "harness|mmlu_high_school_us_history|0": {"alias": " - high_school_us_history", "acc,none": 0.24019607843137256, "acc_stderr,none": 0.02998373305591362},
+    "harness|mmlu_high_school_world_history|0": {"alias": " - high_school_world_history", "acc,none": 0.2742616033755274, "acc_stderr,none": 0.02904133351059804},
+    "harness|mmlu_international_law|0": {"alias": " - international_law", "acc,none": 0.32231404958677684, "acc_stderr,none": 0.04266416363352168},
+    "harness|mmlu_jurisprudence|0": {"alias": " - jurisprudence", "acc,none": 0.28703703703703703, "acc_stderr,none": 0.043733130409147614},
+    "harness|mmlu_logical_fallacies|0": {"alias": " - logical_fallacies", "acc,none": 0.24539877300613497, "acc_stderr,none": 0.03380939813943354},
+    "harness|mmlu_moral_disputes|0": {"alias": " - moral_disputes", "acc,none": 0.3236994219653179, "acc_stderr,none": 0.0251901813276084},
+    "harness|mmlu_moral_scenarios|0": {"alias": " - moral_scenarios", "acc,none": 0.23798882681564246, "acc_stderr,none": 0.014242630070574885},
+    "harness|mmlu_philosophy|0": {"alias": " - philosophy", "acc,none": 0.2797427652733119, "acc_stderr,none": 0.025494259350694905},
+    "harness|mmlu_prehistory|0": {"alias": " - prehistory", "acc,none": 0.28703703703703703, "acc_stderr,none": 0.025171041915309684},
+    "harness|mmlu_professional_law|0": {"alias": " - professional_law", "acc,none": 0.26727509778357234, "acc_stderr,none": 0.011302607515637534},
+    "harness|mmlu_world_religions|0": {"alias": " - world_religions", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.03615507630310935},
+    "harness|mmlu_other|0": {"alias": " - other", "acc,none": 0.25458641776633406, "acc_stderr,none": 0.007807996038061353},
+    "harness|mmlu_business_ethics|0": {"alias": " - business_ethics", "acc,none": 0.21, "acc_stderr,none": 0.040936018074033256},
+    "harness|mmlu_clinical_knowledge|0": {"alias": " - clinical_knowledge", "acc,none": 0.23773584905660378, "acc_stderr,none": 0.02619980880756193},
+    "harness|mmlu_college_medicine|0": {"alias": " - college_medicine", "acc,none": 0.2658959537572254, "acc_stderr,none": 0.033687629322594316},
+    "harness|mmlu_global_facts|0": {"alias": " - global_facts", "acc,none": 0.29, "acc_stderr,none": 0.045604802157206845},
+    "harness|mmlu_human_aging|0": {"alias": " - human_aging", "acc,none": 0.3183856502242152, "acc_stderr,none": 0.03126580522513713},
+    "harness|mmlu_management|0": {"alias": " - management", "acc,none": 0.23300970873786409, "acc_stderr,none": 0.04185832598928315},
+    "harness|mmlu_marketing|0": {"alias": " - marketing", "acc,none": 0.2777777777777778, "acc_stderr,none": 0.029343114798094472},
+    "harness|mmlu_medical_genetics|0": {"alias": " - medical_genetics", "acc,none": 0.25, "acc_stderr,none": 0.04351941398892446},
+    "harness|mmlu_miscellaneous|0": {"alias": " - miscellaneous", "acc,none": 0.2656449553001277, "acc_stderr,none": 0.015794302487888722},
+    "harness|mmlu_nutrition|0": {"alias": " - nutrition", "acc,none": 0.27124183006535946, "acc_stderr,none": 0.025457756696667874},
+    "harness|mmlu_professional_accounting|0": {"alias": " - professional_accounting", "acc,none": 0.24468085106382978, "acc_stderr,none": 0.025645553622266733},
+    "harness|mmlu_professional_medicine|0": {"alias": " - professional_medicine", "acc,none": 0.17279411764705882, "acc_stderr,none": 0.02296606758558178},
+    "harness|mmlu_virology|0": {"alias": " - virology", "acc,none": 0.24096385542168675, "acc_stderr,none": 0.0332939411907353},
+    "harness|mmlu_social_sciences|0": {"alias": " - social_sciences", "acc,none": 0.24666883327916803, "acc_stderr,none": 0.007760689043299785},
+    "harness|mmlu_econometrics|0": {"alias": " - econometrics", "acc,none": 0.23684210526315788, "acc_stderr,none": 0.03999423879281339},
+    "harness|mmlu_high_school_geography|0": {"alias": " - high_school_geography", "acc,none": 0.20707070707070707, "acc_stderr,none": 0.02886977846026705},
+    "harness|mmlu_high_school_government_and_politics|0": {"alias": " - high_school_government_and_politics", "acc,none": 0.25906735751295334, "acc_stderr,none": 0.03161877917935411},
+    "harness|mmlu_high_school_macroeconomics|0": {"alias": " - high_school_macroeconomics", "acc,none": 0.23846153846153847, "acc_stderr,none": 0.02160629449464773},
+    "harness|mmlu_high_school_microeconomics|0": {"alias": " - high_school_microeconomics", "acc,none": 0.226890756302521, "acc_stderr,none": 0.027205371538279493},
+    "harness|mmlu_high_school_psychology|0": {"alias": " - high_school_psychology", "acc,none": 0.23486238532110093, "acc_stderr,none": 0.018175110510343585},
+    "harness|mmlu_human_sexuality|0": {"alias": " - human_sexuality", "acc,none": 0.2900763358778626, "acc_stderr,none": 0.039800662464677665},
+    "harness|mmlu_professional_psychology|0": {"alias": " - professional_psychology", "acc,none": 0.2679738562091503, "acc_stderr,none": 0.017917974069594722},
+    "harness|mmlu_public_relations|0": {"alias": " - public_relations", "acc,none": 0.2545454545454545, "acc_stderr,none": 0.04172343038705383},
+    "harness|mmlu_security_studies|0": {"alias": " - security_studies", "acc,none": 0.19591836734693877, "acc_stderr,none": 0.025409301953225678},
+    "harness|mmlu_sociology|0": {"alias": " - sociology", "acc,none": 0.24378109452736318, "acc_stderr,none": 0.030360490154014645},
+    "harness|mmlu_us_foreign_policy|0": {"alias": " - us_foreign_policy", "acc,none": 0.39, "acc_stderr,none": 0.04902071300001974},
+    "harness|mmlu_stem|0": {"alias": " - stem", "acc,none": 0.24865207738661593, "acc_stderr,none": 0.0076928830660678025},
+    "harness|mmlu_abstract_algebra|0": {"alias": " - abstract_algebra", "acc,none": 0.22, "acc_stderr,none": 0.04163331998932269},
+    "harness|mmlu_anatomy|0": {"alias": " - anatomy", "acc,none": 0.25925925925925924, "acc_stderr,none": 0.03785714465066653},
+    "harness|mmlu_astronomy|0": {"alias": " - astronomy", "acc,none": 0.26973684210526316, "acc_stderr,none": 0.03611780560284898},
+    "harness|mmlu_college_biology|0": {"alias": " - college_biology", "acc,none": 0.2708333333333333, "acc_stderr,none": 0.03716177437566017},
+    "harness|mmlu_college_chemistry|0": {"alias": " - college_chemistry", "acc,none": 0.17, "acc_stderr,none": 0.0377525168068637},
+    "harness|mmlu_college_computer_science|0": {"alias": " - college_computer_science", "acc,none": 0.29, "acc_stderr,none": 0.04560480215720684},
+    "harness|mmlu_college_mathematics|0": {"alias": " - college_mathematics", "acc,none": 0.21, "acc_stderr,none": 0.040936018074033256},
+    "harness|mmlu_college_physics|0": {"alias": " - college_physics", "acc,none": 0.19607843137254902, "acc_stderr,none": 0.03950581861179961},
+    "harness|mmlu_computer_security|0": {"alias": " - computer_security", "acc,none": 0.33, "acc_stderr,none": 0.047258156262526045},
+    "harness|mmlu_conceptual_physics|0": {"alias": " - conceptual_physics", "acc,none": 0.2765957446808511, "acc_stderr,none": 0.029241883869628837},
+    "harness|mmlu_electrical_engineering|0": {"alias": " - electrical_engineering", "acc,none": 0.27586206896551724, "acc_stderr,none": 0.037245636197746325},
+    "harness|mmlu_elementary_mathematics|0": {"alias": " - elementary_mathematics", "acc,none": 0.2222222222222222, "acc_stderr,none": 0.02141168439369418},
+    "harness|mmlu_high_school_biology|0": {"alias": " - high_school_biology", "acc,none": 0.24838709677419354, "acc_stderr,none": 0.024580028921481},
+    "harness|mmlu_high_school_chemistry|0": {"alias": " - high_school_chemistry", "acc,none": 0.28078817733990147, "acc_stderr,none": 0.03161856335358611},
+    "harness|mmlu_high_school_computer_science|0": {"alias": " - high_school_computer_science", "acc,none": 0.31, "acc_stderr,none": 0.04648231987117316},
+    "harness|mmlu_high_school_mathematics|0": {"alias": " - high_school_mathematics", "acc,none": 0.2222222222222222, "acc_stderr,none": 0.02534809746809784},
+    "harness|mmlu_high_school_physics|0": {"alias": " - high_school_physics", "acc,none": 0.23178807947019867, "acc_stderr,none": 0.03445406271987054},
+    "harness|mmlu_high_school_statistics|0": {"alias": " - high_school_statistics", "acc,none": 0.2037037037037037, "acc_stderr,none": 0.02746740180405799},
+    "harness|mmlu_machine_learning|0": {"alias": " - machine_learning", "acc,none": 0.30357142857142855, "acc_stderr,none": 0.04364226155841044},
+    "harness|winogrande|0": {"acc,none": 0.6835043409629045, "acc_stderr,none": 0.013071868328051487, "alias": "winogrande"},
+    "harness|truthfulqa:mc1|0": {"acc,none": 0.21542227662178703, "acc_stderr,none": 0.01439190265242768, "alias": "truthfulqa_mc1"},
+    "harness|arc:easy|0": {"acc,none": 0.7432659932659933, "acc_stderr,none": 0.008963590834042407, "acc_norm,none": 0.7125420875420876, "acc_norm_stderr,none": 0.009286682281593414, "alias": "arc_easy"},
+    "harness|boolq|0": {"acc,none": 0.7422018348623853, "acc_stderr,none": 0.007650564175824782, "alias": "boolq"},
+    "harness|hellaswag|0": {"acc,none": 0.5715992830113523, "acc_stderr,none": 0.004938356615955415, "acc_norm,none": 0.7571200955984864, "acc_norm_stderr,none": 0.00427946712856064, "alias": "hellaswag"},
+    "harness|openbookqa|0": {"acc,none": 0.308, "acc_stderr,none": 0.0206670329874661, "acc_norm,none": 0.42, "acc_norm_stderr,none": 0.02209471322976178, "alias": "openbookqa"},
+    "harness|lambada:openai|0": {"perplexity,none": 3.3667652318024137, "perplexity_stderr,none": 0.06549965546799866, "acc,none": 0.7483019600232874, "acc_stderr,none": 0.00604631029126968, "alias": "lambada_openai"},
+    "harness|piqa|0": {"acc,none": 0.7910772578890098, "acc_stderr,none": 0.009485227030105054, "acc_norm,none": 0.7986942328618063, "acc_norm_stderr,none": 0.009355431098990466, "alias": "piqa"}
+  },
+  "task_info": {
+    "model": "Intel/falcon-7b-int4-inc", "revision": "main", "private": false, "params": 4.76,
+    "architectures": "FalconForCausalLM", "quant_type": "AutoRound", "precision": "4bit",
+    "model_params": 6.74, "model_size": 4.76, "weight_dtype": "int4", "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Finished",
+    "submitted_time": "2024-05-28T15:45:53Z", "model_type": "quantization", "job_id": -1,
+    "job_start_time": null, "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "autoround_version": "0.2.0.dev", "bits": 4, "damp_percent": 0.01, "desc_act": false,
+    "enable_minmax_tuning": true, "enable_quanted_input": false, "group_size": 64,
+    "is_marlin_format": false, "iters": 1000, "lr": 0.001, "minmax_lr": 0.001,
+    "model_file_base_name": "model", "model_name_or_path": null, "quant_method": "gptq",
+    "scale_dtype": "float16", "static_groups": false, "sym": false, "true_sequential": false
+  },
+  "versions": {
+    "harness|truthfulqa:mc2|0": 2.0, "harness|arc:challenge|0": 1.0,
+    "harness|mmlu|0": null, "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0,
+    "harness|winogrande|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|arc:easy|0": 1.0,
+    "harness|boolq|0": 2.0, "harness|hellaswag|0": 1.0, "harness|openbookqa|0": 1.0,
+    "harness|lambada:openai|0": 1.0, "harness|piqa|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0,
+    "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0,
+    "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0,
+    "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0,
+    "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0, "winogrande": 0
+  },
+  "date": 1716969791.1227543,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=Intel/falcon-7b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null,
+    "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null
+  }
+}
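The "config" block at the end of each file records how the run was launched: lm-evaluation-harness with an "hf" model, the model_args string shown, batch size 2, on CUDA. A hedged sketch of re-running one of these zero-shot evaluations follows, assuming the 0.4-series `lm_eval.simple_evaluate` Python API; the task subset is illustrative, and the harness-internal `_commit_hash` field is dropped from model_args.

```python
import lm_eval

# Assumes lm-evaluation-harness 0.4.x, where simple_evaluate mirrors the CLI arguments.
results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=Intel/falcon-7b-int4-inc,trust_remote_code=True,dtype=float16",
    tasks=["arc_easy", "arc_challenge", "boolq"],  # illustrative subset of the tasks above
    num_fewshot=0,   # every task in these result files was run zero-shot
    batch_size=2,    # matches "batch_size": 2 in the config block
    device="cuda",
)
print(results["results"]["arc_easy"])
```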
Intel/results_2024-05-29-18-26-24.json
ADDED
@@ -0,0 +1,596 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null,
+    "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-29-18-26-24",
+    "total_evaluation_time_secondes": "", "model_name": "Intel/Qwen1.5-0.5B-Chat-int4-inc", "model_sha": "",
+    "model_dtype": "4bit", "model_size": 0.5, "model_params": 1, "quant_type": "AutoRound", "precision": "4bit"
+  },
+  "results": {
+    "harness|piqa|0": {"acc,none": 0.6681175190424374, "acc_stderr,none": 0.010986617776361585, "acc_norm,none": 0.6653971708378672, "acc_norm_stderr,none": 0.011009071725162497, "alias": "piqa"},
+    "harness|truthfulqa:mc2|0": {"acc,none": 0.4315761654933411, "acc_stderr,none": 0.015099270531814455, "alias": "truthfulqa_mc2"},
+    "harness|hellaswag|0": {"acc,none": 0.3579964150567616, "acc_stderr,none": 0.004784312972495402, "acc_norm,none": 0.44303923521210914, "acc_norm_stderr,none": 0.0049572966913915785, "alias": "hellaswag"},
+    "harness|lambada:openai|0": {"perplexity,none": 31.409381819503288, "perplexity_stderr,none": 1.554502871641947, "acc,none": 0.40655928585290124, "acc_stderr,none": 0.006843254387875305, "alias": "lambada_openai"},
+    "harness|mmlu|0": {"acc,none": 0.27994587665574705, "acc_stderr,none": 0.0037624302697806446, "alias": "mmlu"},
+    "harness|mmlu_humanities|0": {"alias": " - humanities", "acc,none": 0.28990435706695006, "acc_stderr,none": 0.006569543117391881},
+    "harness|mmlu_formal_logic|0": {"alias": " - formal_logic", "acc,none": 0.3253968253968254, "acc_stderr,none": 0.04190596438871137},
+    "harness|mmlu_high_school_european_history|0": {"alias": " - high_school_european_history", "acc,none": 0.46060606060606063, "acc_stderr,none": 0.03892207016552013},
+    "harness|mmlu_high_school_us_history|0": {"alias": " - high_school_us_history", "acc,none": 0.3480392156862745, "acc_stderr,none": 0.03343311240488418},
+    "harness|mmlu_high_school_world_history|0": {"alias": " - high_school_world_history", "acc,none": 0.43037974683544306, "acc_stderr,none": 0.03223017195937599},
+    "harness|mmlu_international_law|0": {"alias": " - international_law", "acc,none": 0.34710743801652894, "acc_stderr,none": 0.04345724570292534},
+    "harness|mmlu_jurisprudence|0": {"alias": " - jurisprudence", "acc,none": 0.35185185185185186, "acc_stderr,none": 0.04616631111801712},
+    "harness|mmlu_logical_fallacies|0": {"alias": " - logical_fallacies", "acc,none": 0.294478527607362, "acc_stderr,none": 0.03581165790474082},
+    "harness|mmlu_moral_disputes|0": {"alias": " - moral_disputes", "acc,none": 0.28901734104046245, "acc_stderr,none": 0.024405173935783238},
+    "harness|mmlu_moral_scenarios|0": {"alias": " - moral_scenarios", "acc,none": 0.23798882681564246, "acc_stderr,none": 0.014242630070574885},
+    "harness|mmlu_philosophy|0": {"alias": " - philosophy", "acc,none": 0.29260450160771706, "acc_stderr,none": 0.02583989833487798},
+    "harness|mmlu_prehistory|0": {"alias": " - prehistory", "acc,none": 0.2716049382716049, "acc_stderr,none": 0.02474862449053738},
+    "harness|mmlu_professional_law|0": {"alias": " - professional_law", "acc,none": 0.25749674054758803, "acc_stderr,none": 0.011167706014904156},
+    "harness|mmlu_world_religions|0": {"alias": " - world_religions", "acc,none": 0.34502923976608185, "acc_stderr,none": 0.03645981377388806},
+    "harness|mmlu_other|0": {"alias": " - other", "acc,none": 0.30672674605729, "acc_stderr,none": 0.00820738160863629},
+    "harness|mmlu_business_ethics|0": {"alias": " - business_ethics", "acc,none": 0.34, "acc_stderr,none": 0.04760952285695235},
+    "harness|mmlu_clinical_knowledge|0": {"alias": " - clinical_knowledge", "acc,none": 0.26037735849056604, "acc_stderr,none": 0.027008766090708094},
+    "harness|mmlu_college_medicine|0": {"alias": " - college_medicine", "acc,none": 0.24855491329479767, "acc_stderr,none": 0.03295304696818318},
+    "harness|mmlu_global_facts|0": {"alias": " - global_facts", "acc,none": 0.2, "acc_stderr,none": 0.04020151261036845},
+    "harness|mmlu_human_aging|0": {"alias": " - human_aging", "acc,none": 0.3901345291479821, "acc_stderr,none": 0.03273766725459157},
+    "harness|mmlu_management|0": {"alias": " - management", "acc,none": 0.3300970873786408, "acc_stderr,none": 0.0465614711001235},
+    "harness|mmlu_marketing|0": {"alias": " - marketing", "acc,none": 0.3803418803418803, "acc_stderr,none": 0.031804252043840985},
+    "harness|mmlu_medical_genetics|0": {"alias": " - medical_genetics", "acc,none": 0.33, "acc_stderr,none": 0.04725815626252604},
+    "harness|mmlu_miscellaneous|0": {"alias": " - miscellaneous", "acc,none": 0.36015325670498083, "acc_stderr,none": 0.0171663624713693},
+    "harness|mmlu_nutrition|0": {"alias": " - nutrition", "acc,none": 0.3006535947712418, "acc_stderr,none": 0.026256053835718964},
+    "harness|mmlu_professional_accounting|0": {"alias": " - professional_accounting", "acc,none": 0.23049645390070922, "acc_stderr,none": 0.025123739226872405},
+    "harness|mmlu_professional_medicine|0": {"alias": " - professional_medicine", "acc,none": 0.1875, "acc_stderr,none": 0.023709788253811766},
+    "harness|mmlu_virology|0": {"alias": " - virology", "acc,none": 0.3253012048192771, "acc_stderr,none": 0.03647168523683227},
+    "harness|mmlu_social_sciences|0": {"alias": " - social_sciences", "acc,none": 0.2723431914202145, "acc_stderr,none": 0.008002480716014159},
+    "harness|mmlu_econometrics|0": {"alias": " - econometrics", "acc,none": 0.22807017543859648, "acc_stderr,none": 0.03947152782669415},
+    "harness|mmlu_high_school_geography|0": {"alias": " - high_school_geography", "acc,none": 0.31313131313131315, "acc_stderr,none": 0.033042050878136525},
+    "harness|mmlu_high_school_government_and_politics|0": {"alias": " - high_school_government_and_politics", "acc,none": 0.22797927461139897, "acc_stderr,none": 0.030276909945178253},
+    "harness|mmlu_high_school_macroeconomics|0": {"alias": " - high_school_macroeconomics", "acc,none": 0.23846153846153847, "acc_stderr,none": 0.021606294494647727},
+    "harness|mmlu_high_school_microeconomics|0": {"alias": " - high_school_microeconomics", "acc,none": 0.226890756302521, "acc_stderr,none": 0.02720537153827948},
+    "harness|mmlu_high_school_psychology|0": {"alias": " - high_school_psychology", "acc,none": 0.29174311926605506, "acc_stderr,none": 0.019489300968876525},
+    "harness|mmlu_human_sexuality|0": {"alias": " - human_sexuality", "acc,none": 0.366412213740458, "acc_stderr,none": 0.04225875451969638},
+    "harness|mmlu_professional_psychology|0": {"alias": " - professional_psychology", "acc,none": 0.2875816993464052, "acc_stderr,none": 0.018311653053648222},
+    "harness|mmlu_public_relations|0": {"alias": " - public_relations", "acc,none": 0.2636363636363636, "acc_stderr,none": 0.04220224692971987},
+    "harness|mmlu_security_studies|0": {"alias": " - security_studies", "acc,none": 0.2, "acc_stderr,none": 0.025607375986579153},
+    "harness|mmlu_sociology|0": {"alias": " - sociology", "acc,none": 0.3034825870646766, "acc_stderr,none": 0.03251006816458618},
+    "harness|mmlu_us_foreign_policy|0": {"alias": " - us_foreign_policy", "acc,none": 0.37, "acc_stderr,none": 0.04852365870939099},
+    "harness|mmlu_stem|0": {"alias": " - stem", "acc,none": 0.24611481129083412, "acc_stderr,none": 0.007633065196853315},
+    "harness|mmlu_abstract_algebra|0": {"alias": " - abstract_algebra", "acc,none": 0.25, "acc_stderr,none": 0.04351941398892446},
+    "harness|mmlu_anatomy|0": {"alias": " - anatomy", "acc,none": 0.2740740740740741, "acc_stderr,none": 0.03853254836552003},
+    "harness|mmlu_astronomy|0": {"alias": " - astronomy", "acc,none": 0.28289473684210525, "acc_stderr,none": 0.03665349695640767},
+    "harness|mmlu_college_biology|0": {"alias": " - college_biology", "acc,none": 0.2986111111111111, "acc_stderr,none": 0.03827052357950756},
+    "harness|mmlu_college_chemistry|0": {"alias": " - college_chemistry", "acc,none": 0.21, "acc_stderr,none": 0.040936018074033256},
+    "harness|mmlu_college_computer_science|0": {"alias": " - college_computer_science", "acc,none": 0.32, "acc_stderr,none": 0.046882617226215034},
+    "harness|mmlu_college_mathematics|0": {"alias": " - college_mathematics", "acc,none": 0.32, "acc_stderr,none": 0.04688261722621504},
+    "harness|mmlu_college_physics|0": {"alias": " - college_physics", "acc,none": 0.2549019607843137, "acc_stderr,none": 0.043364327079931785},
+    "harness|mmlu_computer_security|0": {"alias": " - computer_security", "acc,none": 0.32, "acc_stderr,none": 0.046882617226215034},
+    "harness|mmlu_conceptual_physics|0": {"alias": " - conceptual_physics", "acc,none": 0.26382978723404255, "acc_stderr,none": 0.028809989854102973},
+    "harness|mmlu_electrical_engineering|0": {
|
313 |
+
"alias": " - electrical_engineering",
|
314 |
+
"acc,none": 0.27586206896551724,
|
315 |
+
"acc_stderr,none": 0.037245636197746325
|
316 |
+
},
|
317 |
+
"harness|mmlu_elementary_mathematics|0": {
|
318 |
+
"alias": " - elementary_mathematics",
|
319 |
+
"acc,none": 0.19576719576719576,
|
320 |
+
"acc_stderr,none": 0.020435730971541794
|
321 |
+
},
|
322 |
+
"harness|mmlu_high_school_biology|0": {
|
323 |
+
"alias": " - high_school_biology",
|
324 |
+
"acc,none": 0.25483870967741934,
|
325 |
+
"acc_stderr,none": 0.024790118459332215
|
326 |
+
},
|
327 |
+
"harness|mmlu_high_school_chemistry|0": {
|
328 |
+
"alias": " - high_school_chemistry",
|
329 |
+
"acc,none": 0.1625615763546798,
|
330 |
+
"acc_stderr,none": 0.02596030006460557
|
331 |
+
},
|
332 |
+
"harness|mmlu_high_school_computer_science|0": {
|
333 |
+
"alias": " - high_school_computer_science",
|
334 |
+
"acc,none": 0.34,
|
335 |
+
"acc_stderr,none": 0.047609522856952344
|
336 |
+
},
|
337 |
+
"harness|mmlu_high_school_mathematics|0": {
|
338 |
+
"alias": " - high_school_mathematics",
|
339 |
+
"acc,none": 0.2222222222222222,
|
340 |
+
"acc_stderr,none": 0.025348097468097845
|
341 |
+
},
|
342 |
+
"harness|mmlu_high_school_physics|0": {
|
343 |
+
"alias": " - high_school_physics",
|
344 |
+
"acc,none": 0.2185430463576159,
|
345 |
+
"acc_stderr,none": 0.033742355504256936
|
346 |
+
},
|
347 |
+
"harness|mmlu_high_school_statistics|0": {
|
348 |
+
"alias": " - high_school_statistics",
|
349 |
+
"acc,none": 0.14814814814814814,
|
350 |
+
"acc_stderr,none": 0.024227629273728356
|
351 |
+
},
|
352 |
+
"harness|mmlu_machine_learning|0": {
|
353 |
+
"alias": " - machine_learning",
|
354 |
+
"acc,none": 0.3392857142857143,
|
355 |
+
"acc_stderr,none": 0.04493949068613539
|
356 |
+
},
|
357 |
+
"harness|winogrande|0": {
|
358 |
+
"acc,none": 0.5524861878453039,
|
359 |
+
"acc_stderr,none": 0.013974847640536197,
|
360 |
+
"alias": "winogrande"
|
361 |
+
},
|
362 |
+
"harness|arc:easy|0": {
|
363 |
+
"acc,none": 0.47895622895622897,
|
364 |
+
"acc_stderr,none": 0.010250692602022582,
|
365 |
+
"acc_norm,none": 0.41708754208754206,
|
366 |
+
"acc_norm_stderr,none": 0.010117738967781982,
|
367 |
+
"alias": "arc_easy"
|
368 |
+
},
|
369 |
+
"harness|truthfulqa:mc1|0": {
|
370 |
+
"acc,none": 0.2558139534883721,
|
371 |
+
"acc_stderr,none": 0.015274176219283349,
|
372 |
+
"alias": "truthfulqa_mc1"
|
373 |
+
},
|
374 |
+
"harness|boolq|0": {
|
375 |
+
"acc,none": 0.4507645259938838,
|
376 |
+
"acc_stderr,none": 0.008702553362422868,
|
377 |
+
"alias": "boolq"
|
378 |
+
},
|
379 |
+
"harness|openbookqa|0": {
|
380 |
+
"acc,none": 0.194,
|
381 |
+
"acc_stderr,none": 0.01770182785530462,
|
382 |
+
"acc_norm,none": 0.3,
|
383 |
+
"acc_norm_stderr,none": 0.020514426225628053,
|
384 |
+
"alias": "openbookqa"
|
385 |
+
},
|
386 |
+
"harness|arc:challenge|0": {
|
387 |
+
"acc,none": 0.24829351535836178,
|
388 |
+
"acc_stderr,none": 0.012624912868089753,
|
389 |
+
"acc_norm,none": 0.2832764505119454,
|
390 |
+
"acc_norm_stderr,none": 0.013167478735134575,
|
391 |
+
"alias": "arc_challenge"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Intel/Qwen1.5-0.5B-Chat-int4-inc",
|
396 |
+
"local": true,
|
397 |
+
"revision": "main",
|
398 |
+
"private": false,
|
399 |
+
"params": 1,
|
400 |
+
"architectures": "QwenForCausalLM",
|
401 |
+
"quant_type": "AutoRound",
|
402 |
+
"precision": "4bit",
|
403 |
+
"model_params": 1,
|
404 |
+
"model_size": 0.5,
|
405 |
+
"weight_dtype": "int4",
|
406 |
+
"compute_dtype": "float16",
|
407 |
+
"gguf_ftype": "*Q4_0.gguf",
|
408 |
+
"hardware": "gpu",
|
409 |
+
"status": "Finished",
|
410 |
+
"submitted_time": "2024-04-23T15:44:22Z",
|
411 |
+
"model_type": "quantization",
|
412 |
+
"job_id": -1,
|
413 |
+
"job_start_time": null,
|
414 |
+
"scripts": "ITREX"
|
415 |
+
},
|
416 |
+
"quantization_config": {
|
417 |
+
"autoround_version": "0.1",
|
418 |
+
"bits": 4,
|
419 |
+
"damp_percent": 0.01,
|
420 |
+
"desc_act": false,
|
421 |
+
"enable_minmax_tuning": true,
|
422 |
+
"group_size": 128,
|
423 |
+
"is_marlin_format": false,
|
424 |
+
"iters": 200,
|
425 |
+
"lr": 0.005,
|
426 |
+
"minmax_lr": 0.005,
|
427 |
+
"model_file_base_name": "model",
|
428 |
+
"model_name_or_path": null,
|
429 |
+
"quant_method": "gptq",
|
430 |
+
"scale_dtype": "torch.float32",
|
431 |
+
"static_groups": false,
|
432 |
+
"sym": true,
|
433 |
+
"true_sequential": false,
|
434 |
+
"use_quant_input": false
|
435 |
+
},
|
436 |
+
"versions": {
|
437 |
+
"harness|piqa|0": 1.0,
|
438 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
439 |
+
"harness|hellaswag|0": 1.0,
|
440 |
+
"harness|lambada:openai|0": 1.0,
|
441 |
+
"harness|mmlu|0": null,
|
442 |
+
"harness|mmlu_humanities|0": null,
|
443 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
444 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
445 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
446 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
447 |
+
"harness|mmlu_international_law|0": 0.0,
|
448 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
449 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
450 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
451 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
452 |
+
"harness|mmlu_philosophy|0": 0.0,
|
453 |
+
"harness|mmlu_prehistory|0": 0.0,
|
454 |
+
"harness|mmlu_professional_law|0": 0.0,
|
455 |
+
"harness|mmlu_world_religions|0": 0.0,
|
456 |
+
"harness|mmlu_other|0": null,
|
457 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
458 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
459 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
460 |
+
"harness|mmlu_global_facts|0": 0.0,
|
461 |
+
"harness|mmlu_human_aging|0": 0.0,
|
462 |
+
"harness|mmlu_management|0": 0.0,
|
463 |
+
"harness|mmlu_marketing|0": 0.0,
|
464 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
465 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
466 |
+
"harness|mmlu_nutrition|0": 0.0,
|
467 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
468 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
469 |
+
"harness|mmlu_virology|0": 0.0,
|
470 |
+
"harness|mmlu_social_sciences|0": null,
|
471 |
+
"harness|mmlu_econometrics|0": 0.0,
|
472 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
477 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
478 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
479 |
+
"harness|mmlu_public_relations|0": 0.0,
|
480 |
+
"harness|mmlu_security_studies|0": 0.0,
|
481 |
+
"harness|mmlu_sociology|0": 0.0,
|
482 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
483 |
+
"harness|mmlu_stem|0": null,
|
484 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
485 |
+
"harness|mmlu_anatomy|0": 0.0,
|
486 |
+
"harness|mmlu_astronomy|0": 0.0,
|
487 |
+
"harness|mmlu_college_biology|0": 0.0,
|
488 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_college_physics|0": 0.0,
|
492 |
+
"harness|mmlu_computer_security|0": 0.0,
|
493 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
494 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
495 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
496 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
497 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
501 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
502 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
503 |
+
"harness|winogrande|0": 1.0,
|
504 |
+
"harness|arc:easy|0": 1.0,
|
505 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
506 |
+
"harness|boolq|0": 2.0,
|
507 |
+
"harness|openbookqa|0": 1.0,
|
508 |
+
"harness|arc:challenge|0": 1.0
|
509 |
+
},
|
510 |
+
"n-shot": {
|
511 |
+
"arc_challenge": 0,
|
512 |
+
"arc_easy": 0,
|
513 |
+
"boolq": 0,
|
514 |
+
"hellaswag": 0,
|
515 |
+
"lambada_openai": 0,
|
516 |
+
"mmlu": 0,
|
517 |
+
"mmlu_abstract_algebra": 0,
|
518 |
+
"mmlu_anatomy": 0,
|
519 |
+
"mmlu_astronomy": 0,
|
520 |
+
"mmlu_business_ethics": 0,
|
521 |
+
"mmlu_clinical_knowledge": 0,
|
522 |
+
"mmlu_college_biology": 0,
|
523 |
+
"mmlu_college_chemistry": 0,
|
524 |
+
"mmlu_college_computer_science": 0,
|
525 |
+
"mmlu_college_mathematics": 0,
|
526 |
+
"mmlu_college_medicine": 0,
|
527 |
+
"mmlu_college_physics": 0,
|
528 |
+
"mmlu_computer_security": 0,
|
529 |
+
"mmlu_conceptual_physics": 0,
|
530 |
+
"mmlu_econometrics": 0,
|
531 |
+
"mmlu_electrical_engineering": 0,
|
532 |
+
"mmlu_elementary_mathematics": 0,
|
533 |
+
"mmlu_formal_logic": 0,
|
534 |
+
"mmlu_global_facts": 0,
|
535 |
+
"mmlu_high_school_biology": 0,
|
536 |
+
"mmlu_high_school_chemistry": 0,
|
537 |
+
"mmlu_high_school_computer_science": 0,
|
538 |
+
"mmlu_high_school_european_history": 0,
|
539 |
+
"mmlu_high_school_geography": 0,
|
540 |
+
"mmlu_high_school_government_and_politics": 0,
|
541 |
+
"mmlu_high_school_macroeconomics": 0,
|
542 |
+
"mmlu_high_school_mathematics": 0,
|
543 |
+
"mmlu_high_school_microeconomics": 0,
|
544 |
+
"mmlu_high_school_physics": 0,
|
545 |
+
"mmlu_high_school_psychology": 0,
|
546 |
+
"mmlu_high_school_statistics": 0,
|
547 |
+
"mmlu_high_school_us_history": 0,
|
548 |
+
"mmlu_high_school_world_history": 0,
|
549 |
+
"mmlu_human_aging": 0,
|
550 |
+
"mmlu_human_sexuality": 0,
|
551 |
+
"mmlu_humanities": 0,
|
552 |
+
"mmlu_international_law": 0,
|
553 |
+
"mmlu_jurisprudence": 0,
|
554 |
+
"mmlu_logical_fallacies": 0,
|
555 |
+
"mmlu_machine_learning": 0,
|
556 |
+
"mmlu_management": 0,
|
557 |
+
"mmlu_marketing": 0,
|
558 |
+
"mmlu_medical_genetics": 0,
|
559 |
+
"mmlu_miscellaneous": 0,
|
560 |
+
"mmlu_moral_disputes": 0,
|
561 |
+
"mmlu_moral_scenarios": 0,
|
562 |
+
"mmlu_nutrition": 0,
|
563 |
+
"mmlu_other": 0,
|
564 |
+
"mmlu_philosophy": 0,
|
565 |
+
"mmlu_prehistory": 0,
|
566 |
+
"mmlu_professional_accounting": 0,
|
567 |
+
"mmlu_professional_law": 0,
|
568 |
+
"mmlu_professional_medicine": 0,
|
569 |
+
"mmlu_professional_psychology": 0,
|
570 |
+
"mmlu_public_relations": 0,
|
571 |
+
"mmlu_security_studies": 0,
|
572 |
+
"mmlu_social_sciences": 0,
|
573 |
+
"mmlu_sociology": 0,
|
574 |
+
"mmlu_stem": 0,
|
575 |
+
"mmlu_us_foreign_policy": 0,
|
576 |
+
"mmlu_virology": 0,
|
577 |
+
"mmlu_world_religions": 0,
|
578 |
+
"openbookqa": 0,
|
579 |
+
"piqa": 0,
|
580 |
+
"truthfulqa_mc1": 0,
|
581 |
+
"truthfulqa_mc2": 0,
|
582 |
+
"winogrande": 0
|
583 |
+
},
|
584 |
+
"date": 1716976011.00755,
|
585 |
+
"config": {
|
586 |
+
"model": "hf",
|
587 |
+
"model_args": "pretrained=Intel/Qwen1.5-0.5B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
588 |
+
"batch_size": 2,
|
589 |
+
"batch_sizes": [],
|
590 |
+
"device": "cuda",
|
591 |
+
"use_cache": null,
|
592 |
+
"limit": null,
|
593 |
+
"bootstrap_iters": 100000,
|
594 |
+
"gen_kwargs": null
|
595 |
+
}
|
596 |
+
}
|
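Each of these results files ends with a "config" block recording the exact lm-evaluation-harness invocation: the "hf" backend, the "model_args" string, the batch size, and the device, alongside an all-zero "n-shot" map. As a minimal sketch, assuming the v0.4-style lm_eval.simple_evaluate Python API and abbreviating the task list to a few of the benchmarks reported above, such a run could be reproduced like this:

# Minimal sketch of re-running the evaluation recorded in the "config" block
# above; assumes lm-evaluation-harness v0.4-style API, task list abbreviated.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",  # same backend as "model": "hf" in the config block
    model_args="pretrained=Intel/Qwen1.5-0.5B-Chat-int4-inc,trust_remote_code=True,dtype=float16",
    tasks=["arc_easy", "arc_challenge", "boolq", "openbookqa", "winogrande", "mmlu"],
    num_fewshot=0,   # matches the all-zero "n-shot" block
    batch_size=2,    # "batch_size": 2
    device="cuda",   # "device": "cuda"
)
print(results["results"]["arc_easy"])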
Intel/results_2024-05-31-22-34-27.json
ADDED
@@ -0,0 +1,597 @@
{
  "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-31-22-34-27", "total_evaluation_time_secondes": "", "model_name": "Intel/Mistral-7B-Instruct-v0.2-int2-inc", "model_sha": "", "model_dtype": "2bit", "model_size": 2.6, "model_params": 7.0, "quant_type": "AutoRound", "precision": "2bit" },
  "results": {
    "harness|arc:easy|0": { "acc,none": 0.7647306397306397, "acc_stderr,none": 0.008703724269718644, "acc_norm,none": 0.7062289562289562, "acc_norm_stderr,none": 0.00934642329816672, "alias": "arc_easy" },
    "harness|truthfulqa:mc1|0": { "acc,none": 0.46511627906976744, "acc_stderr,none": 0.017460849975873972, "alias": "truthfulqa_mc1" },
    "harness|openbookqa|0": { "acc,none": 0.29, "acc_stderr,none": 0.02031317923174519, "acc_norm,none": 0.422, "acc_norm_stderr,none": 0.022109039310618552, "alias": "openbookqa" },
    "harness|hellaswag|0": { "acc,none": 0.5856403106950807, "acc_stderr,none": 0.004916043838455648, "acc_norm,none": 0.7586138219478191, "acc_norm_stderr,none": 0.00427049375720614, "alias": "hellaswag" },
    "harness|mmlu|0": { "acc,none": 0.5140293405497792, "acc_stderr,none": 0.004053748565900439, "alias": "mmlu" },
    "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.4758767268862912, "acc_stderr,none": 0.006976115417610935 },
    "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.40476190476190477, "acc_stderr,none": 0.043902592653775614 },
    "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.703030303030303, "acc_stderr,none": 0.03567969772268049 },
    "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7058823529411765, "acc_stderr,none": 0.03198001660115072 },
    "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.0306858205966108 },
    "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7107438016528925, "acc_stderr,none": 0.04139112727635463 },
    "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.6296296296296297, "acc_stderr,none": 0.04668408033024931 },
    "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.6503067484662577, "acc_stderr,none": 0.037466683254700206 },
    "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.5809248554913294, "acc_stderr,none": 0.02656417811142261 },
    "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.29720670391061454, "acc_stderr,none": 0.015285313353641602 },
    "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.5530546623794212, "acc_stderr,none": 0.02823776942208532 },
    "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.5432098765432098, "acc_stderr,none": 0.02771666165019404 },
    "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.378748370273794, "acc_stderr,none": 0.012389052105003736 },
    "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.03615507630310935 },
    "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.5754747344705504, "acc_stderr,none": 0.008572422932328742 },
    "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.49, "acc_stderr,none": 0.05024183937956912 },
    "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6, "acc_stderr,none": 0.030151134457776285 },
    "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.47398843930635837, "acc_stderr,none": 0.038073017265045125 },
    "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.35, "acc_stderr,none": 0.047937248544110196 },
    "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.57847533632287, "acc_stderr,none": 0.03314190222110657 },
    "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7087378640776699, "acc_stderr,none": 0.044986763205729224 },
    "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.7905982905982906, "acc_stderr,none": 0.026655699653922744 },
    "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.51, "acc_stderr,none": 0.05024183937956912 },
    "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.7037037037037037, "acc_stderr,none": 0.016328814422102055 },
    "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.5522875816993464, "acc_stderr,none": 0.02847293847803353 },
    "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.36524822695035464, "acc_stderr,none": 0.028723863853281264 },
    "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.4889705882352941, "acc_stderr,none": 0.030365446477275675 },
    "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.41566265060240964, "acc_stderr,none": 0.038367221765980515 },
    "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.5940851478713032, "acc_stderr,none": 0.008651102612203724 },
    "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.044346007015849245 },
    "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.6515151515151515, "acc_stderr,none": 0.033948539651564025 },
    "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.6839378238341969, "acc_stderr,none": 0.03355397369686172 },
    "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.45897435897435895, "acc_stderr,none": 0.025265525491284295 },
    "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.542016806722689, "acc_stderr,none": 0.03236361111951941 },
    "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.6954128440366972, "acc_stderr,none": 0.019732299420354045 },
    "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.5877862595419847, "acc_stderr,none": 0.04317171194870255 },
    "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.511437908496732, "acc_stderr,none": 0.02022254151561086 },
    "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6, "acc_stderr,none": 0.0469237132203465 },
    "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.6448979591836734, "acc_stderr,none": 0.030635655150387638 },
    "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.7562189054726368, "acc_stderr,none": 0.03036049015401466 },
    "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.76, "acc_stderr,none": 0.04292346959909283 },
    "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.4322867110688233, "acc_stderr,none": 0.008656050195793772 },
    "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.29, "acc_stderr,none": 0.04560480215720684 },
    "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5037037037037037, "acc_stderr,none": 0.04319223625811331 },
    "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.5723684210526315, "acc_stderr,none": 0.04026097083296563 },
    "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.5138888888888888, "acc_stderr,none": 0.041795966175810016 },
    "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.29, "acc_stderr,none": 0.04560480215720684 },
    "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.48, "acc_stderr,none": 0.050211673156867795 },
    "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.35, "acc_stderr,none": 0.047937248544110196 },
    "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.4117647058823529, "acc_stderr,none": 0.04897104952726366 },
    "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.62, "acc_stderr,none": 0.04878317312145632 },
    "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.37872340425531914, "acc_stderr,none": 0.031709956060406545 },
    "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5103448275862069, "acc_stderr,none": 0.04165774775728762 },
    "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.328042328042328, "acc_stderr,none": 0.024180497164376896 },
    "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6129032258064516, "acc_stderr,none": 0.02770935967503249 },
    "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.39408866995073893, "acc_stderr,none": 0.034381579670365446 },
    "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.53, "acc_stderr,none": 0.05016135580465919 },
    "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.34814814814814815, "acc_stderr,none": 0.029045600290616258 },
    "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3841059602649007, "acc_stderr,none": 0.03971301814719198 },
    "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.36574074074074076, "acc_stderr,none": 0.03284738857647206 },
    "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.42857142857142855, "acc_stderr,none": 0.04697113923010213 },
    "harness|piqa|0": { "acc,none": 0.7747551686615887, "acc_stderr,none": 0.009746643471032147, "acc_norm,none": 0.7763873775843307, "acc_norm_stderr,none": 0.009721489519176287, "alias": "piqa" },
    "harness|boolq|0": { "acc,none": 0.8226299694189603, "acc_stderr,none": 0.00668089927034429, "alias": "boolq" },
    "harness|lambada:openai|0": { "perplexity,none": 5.145717320091905, "perplexity_stderr,none": 0.14689377643543283, "acc,none": 0.6293421307975936, "acc_stderr,none": 0.006728869231430017, "alias": "lambada_openai" },
    "harness|arc:challenge|0": { "acc,none": 0.48464163822525597, "acc_stderr,none": 0.014604496129394913, "acc_norm,none": 0.5, "acc_norm_stderr,none": 0.014611390804670088, "alias": "arc_challenge" },
    "harness|truthfulqa:mc2|0": { "acc,none": 0.6205759673296364, "acc_stderr,none": 0.015812804941316927, "alias": "truthfulqa_mc2" },
    "harness|winogrande|0": { "acc,none": 0.6835043409629045, "acc_stderr,none": 0.01307186832805148, "alias": "winogrande" }
  },
  "task_info": { "model": "Intel/Mistral-7B-Instruct-v0.2-int2-inc", "revision": "main", "private": false, "params": 7.0, "architectures": "MistralForCausalLM", "quant_type": "AutoRound", "precision": "2bit", "model_params": 7.0, "model_size": 2.6, "weight_dtype": "int2", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Finished", "submitted_time": "2024-05-28T15:43:10Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" },
  "quantization_config": { "amp": true, "autoround_version": "0.2.0.dev", "backend": "gptq:triton", "bits": 2, "data_type": "int", "dataset": "NeelNanda/pile-10k", "enable_minmax_tuning": true, "enable_quanted_input": true, "gradient_accumulate_steps": 1, "group_size": 32, "iters": 200, "low_gpu_mem_usage": true, "lr": 0.005, "minmax_lr": 0.01, "n_samples": 512, "quant_method": "intel/auto-round", "scale_dtype": "torch.float16", "seqlen": 2048, "sym": false, "train_bs": 8 },
  "versions": {
    "harness|arc:easy|0": 1.0, "harness|truthfulqa:mc1|0": 2.0, "harness|openbookqa|0": 1.0, "harness|hellaswag|0": 1.0,
    "harness|mmlu|0": null, "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0,
    "harness|piqa|0": 1.0, "harness|boolq|0": 2.0, "harness|lambada:openai|0": 1.0, "harness|arc:challenge|0": 1.0, "harness|truthfulqa:mc2|0": 2.0, "harness|winogrande|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0,
    "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0,
    "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0,
    "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0,
    "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0
  },
  "date": 1717161719.2662494,
  "config": { "model": "hf", "model_args": "pretrained=Intel/Mistral-7B-Instruct-v0.2-int2-inc,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 4, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null }
}
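The "quantization_config" above records the AutoRound recipe behind this int2 checkpoint: 2-bit asymmetric weights ("sym": false), group size 32, and 200 tuning iterations over 512 samples of NeelNanda/pile-10k at sequence length 2048. A hedged sketch of producing such a checkpoint with the intel/auto-round package follows; the keyword names mirror the config keys but may differ between auto-round versions (e.g., "n_samples" vs. "nsamples"), so treat this as an outline rather than the exact script used here.

# Sketch of a 2-bit AutoRound quantization run mirroring the config above.
# Assumes intel/auto-round's Python API; argument names may vary by version.
from transformers import AutoModelForCausalLM, AutoTokenizer
from auto_round import AutoRound

name = "mistralai/Mistral-7B-Instruct-v0.2"
model = AutoModelForCausalLM.from_pretrained(name, torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained(name)

autoround = AutoRound(
    model, tokenizer,
    bits=2, group_size=32, sym=False,      # "bits": 2, "group_size": 32, "sym": false
    iters=200, lr=0.005, minmax_lr=0.01,   # tuning schedule from the config
    seqlen=2048, nsamples=512,             # "seqlen": 2048, "n_samples": 512
    dataset="NeelNanda/pile-10k",
)
autoround.quantize()
autoround.save_quantized("Mistral-7B-Instruct-v0.2-int2-inc")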
QuantFactory/results_2024-05-25-03-11-56.json
ADDED
@@ -0,0 +1,579 @@
{
  "config_general": { "lighteval_sha": "no", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-25-03-11-56", "total_evaluation_time_secondes": "", "model_name": "QuantFactory/Phi-3-mini-4k-instruct-GGUF", "model_sha": "", "model_dtype": "4bit", "model_size": 2.175438336, "model_params": 3.821079552, "quant_type": "llama.cpp", "precision": "4bit" },
  "results": {
    "harness|mmlu|0": { "acc,none": 0.654037886340977, "acc_stderr,none": 0.0038135827737680933, "alias": "mmlu" },
    "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.612327311370882, "acc_stderr,none": 0.00681296950147169 },
    "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.5555555555555556, "acc_stderr,none": 0.044444444444444495 },
    "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.806060606060606, "acc_stderr,none": 0.03087414513656208 },
    "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7745098039215687, "acc_stderr,none": 0.029331162294251728 },
    "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7890295358649789, "acc_stderr,none": 0.02655837250266192 },
    "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7768595041322314, "acc_stderr,none": 0.03800754475228733 },
    "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7407407407407407, "acc_stderr,none": 0.042365112580946315 },
    "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.8098159509202454, "acc_stderr,none": 0.030833491146281214 },
    "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.7109826589595376, "acc_stderr,none": 0.02440517393578323 },
    "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.48268156424581005, "acc_stderr,none": 0.01671246744170252 },
    "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.7009646302250804, "acc_stderr,none": 0.02600330111788514 },
    "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.7685185185185185, "acc_stderr,none": 0.023468429832451152 },
    "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.48370273794002605, "acc_stderr,none": 0.012763450734699816 },
    "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.8187134502923976, "acc_stderr,none": 0.029547741687640038 },
    "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.6971355004827808, "acc_stderr,none": 0.007949711839349649 },
    "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.69, "acc_stderr,none": 0.04648231987117316 },
    "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.7283018867924528, "acc_stderr,none": 0.027377706624670713 },
    "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.6473988439306358, "acc_stderr,none": 0.03643037168958548 },
    "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.31, "acc_stderr,none": 0.04648231987117316 },
    "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6681614349775785, "acc_stderr,none": 0.03160295143776679 },
    "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.8349514563106796, "acc_stderr,none": 0.03675668832233189 },
    "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8717948717948718, "acc_stderr,none": 0.021901905115073318 },
    "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.7, "acc_stderr,none": 0.046056618647183814 },
    "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.8033205619412516, "acc_stderr,none": 0.01421413855691391 },
    "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6993464052287581, "acc_stderr,none": 0.026256053835718964 },
    "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.5390070921985816, "acc_stderr,none": 0.029736592526424434 },
    "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6433823529411765, "acc_stderr,none": 0.02909720956841196 },
    "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4939759036144578, "acc_stderr,none": 0.03892212195333045 },
    "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.7689307767305817, "acc_stderr,none": 0.0074364870765378744 },
    "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.5263157894736842, "acc_stderr,none": 0.046970851366478626 },
    "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.803030303030303, "acc_stderr,none": 0.02833560973246336 },
    "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.8860103626943006, "acc_stderr,none": 0.022935144053919432 },
    "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.7, "acc_stderr,none": 0.023234581088428487 },
    "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.8109243697478992, "acc_stderr,none": 0.025435119438105357 },
    "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.8715596330275229, "acc_stderr,none": 0.014344977542914307 },
    "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.7099236641221374, "acc_stderr,none": 0.03980066246467765 },
    "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.6944444444444444, "acc_stderr,none": 0.018635594034423976 },
    "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6818181818181818, "acc_stderr,none": 0.04461272175910508 },
    "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.7428571428571429, "acc_stderr,none": 0.027979823538744543 },
    "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8656716417910447, "acc_stderr,none": 0.024112678240900826 },
    "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.86, "acc_stderr,none": 0.03487350880197768 },
    "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.5616872819536949, "acc_stderr,none": 0.00842972061711791 },
    "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.36, "acc_stderr,none": 0.04824181513244218 },
    "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.6296296296296297, "acc_stderr,none": 0.041716541613545426 },
    "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.7368421052631579, "acc_stderr,none": 0.03583496176361072 },
    "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.8125, "acc_stderr,none": 0.032639560491693344 },
    "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.46, "acc_stderr,none": 0.05009082659620332 },
    "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.47, "acc_stderr,none": 0.05016135580465919 },
    "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.34, "acc_stderr,none": 0.04760952285695235 },
    "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.37254901960784315, "acc_stderr,none": 0.04810840148082634 },
    "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.75, "acc_stderr,none": 0.04351941398892446 },
    "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.6212765957446809, "acc_stderr,none": 0.03170995606040655 },
    "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5517241379310345, "acc_stderr,none": 0.04144311810878152 },
    "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.48677248677248675, "acc_stderr,none": 0.025742297289575142 },
    "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.8258064516129032, "acc_stderr,none": 0.021576248184514566 },
    "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.5566502463054187, "acc_stderr,none": 0.03495334582162934 },
    "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.64, "acc_stderr,none": 0.04824181513244218 },
    "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.337037037037037, "acc_stderr,none": 0.028820884666253252 },
    "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.4370860927152318, "acc_stderr,none": 0.040500357222306355 },
    "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.5787037037037037, "acc_stderr,none": 0.03367462138896078 },
    "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5, "acc_stderr,none": 0.04745789978762494 },
    "harness|hellaswag|0": { "acc,none": 0.5984863572993427, "acc_stderr,none": 0.004892026457294725, "acc_norm,none": 0.7739494124676359, "acc_norm_stderr,none": 0.0041741747242881, "alias": "hellaswag" },
    "harness|openbookqa|0": { "acc,none": 0.338, "acc_stderr,none": 0.02117566569520941, "acc_norm,none": 0.436, "acc_norm_stderr,none": 0.0221989546414768, "alias": "openbookqa" },
    "harness|arc:easy|0": { "acc,none": 0.8051346801346801, "acc_stderr,none": 0.008127738779969257, "acc_norm,none": 0.7857744107744108, "acc_norm_stderr,none": 0.008418850681568162, "alias": "arc_easy" },
    "harness|boolq|0": { "acc,none": 0.8642201834862385, "acc_stderr,none": 0.005991317719933094, "alias": "boolq" },
    "harness|lambada:openai|0": { "perplexity,none": 5.9716028864360515, "perplexity_stderr,none": 0.17600170081638883, "acc,none": 0.33844362507277315, "acc_stderr,none": 0.006592325932741157, "alias": "lambada_openai" },
    "harness|truthfulqa:mc2|0": { "acc,none": 0.606890311170753, "acc_stderr,none": 0.015406862286089968, "alias": "truthfulqa_mc2" },
    "harness|winogrande|0": { "acc,none": 0.696921862667719, "acc_stderr,none": 0.012916727462634463, "alias": "winogrande" },
    "harness|arc:challenge|0": { "acc,none": 0.5196245733788396, "acc_stderr,none": 0.014600132075947085, "acc_norm,none": 0.5469283276450512, "acc_norm_stderr,none": 0.01454689205200563, "alias": "arc_challenge" },
    "harness|truthfulqa:mc1|0": { "acc,none": 0.4149326805385557, "acc_stderr,none": 0.017248314465805978, "alias": "truthfulqa_mc1" },
    "harness|piqa|0": { "acc,none": 0.778563656147987, "acc_stderr,none": 0.009687616456840253, "acc_norm,none": 0.7752992383025027, "acc_norm_stderr,none": 0.009738282586548361, "alias": "piqa" }
  },
  "task_info": { "model": "QuantFactory/Phi-3-mini-4k-instruct-GGUF", "revision": "main", "private": false, "params": 3.6, "architectures": "?", "quant_type": "llama.cpp", "precision": "4bit", "model_params": 3.6, "model_size": 2.18, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "cpu", "status": "Pending", "submitted_time": "2024-04-29T07:28:31Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "llama_cpp" },
  "quantization_config": { "quant_method": "llama.cpp", "ftype": "*Q4_0.gguf" },
  "versions": {
    "harness|mmlu|0": null, "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0,
|
469 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
470 |
+
"harness|mmlu_college_physics|0": 0.0,
|
471 |
+
"harness|mmlu_computer_security|0": 0.0,
|
472 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
473 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
474 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
478 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
479 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
480 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
481 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
482 |
+
"harness|hellaswag|0": 1.0,
|
483 |
+
"harness|openbookqa|0": 1.0,
|
484 |
+
"harness|arc:easy|0": 1.0,
|
485 |
+
"harness|boolq|0": 2.0,
|
486 |
+
"harness|lambada:openai|0": 1.0,
|
487 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
488 |
+
"harness|winogrande|0": 1.0,
|
489 |
+
"harness|arc:challenge|0": 1.0,
|
490 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
491 |
+
"harness|piqa|0": 1.0
|
492 |
+
},
|
493 |
+
"n-shot": {
|
494 |
+
"arc_challenge": 0,
|
495 |
+
"arc_easy": 0,
|
496 |
+
"boolq": 0,
|
497 |
+
"hellaswag": 0,
|
498 |
+
"lambada_openai": 0,
|
499 |
+
"mmlu": 0,
|
500 |
+
"mmlu_abstract_algebra": 0,
|
501 |
+
"mmlu_anatomy": 0,
|
502 |
+
"mmlu_astronomy": 0,
|
503 |
+
"mmlu_business_ethics": 0,
|
504 |
+
"mmlu_clinical_knowledge": 0,
|
505 |
+
"mmlu_college_biology": 0,
|
506 |
+
"mmlu_college_chemistry": 0,
|
507 |
+
"mmlu_college_computer_science": 0,
|
508 |
+
"mmlu_college_mathematics": 0,
|
509 |
+
"mmlu_college_medicine": 0,
|
510 |
+
"mmlu_college_physics": 0,
|
511 |
+
"mmlu_computer_security": 0,
|
512 |
+
"mmlu_conceptual_physics": 0,
|
513 |
+
"mmlu_econometrics": 0,
|
514 |
+
"mmlu_electrical_engineering": 0,
|
515 |
+
"mmlu_elementary_mathematics": 0,
|
516 |
+
"mmlu_formal_logic": 0,
|
517 |
+
"mmlu_global_facts": 0,
|
518 |
+
"mmlu_high_school_biology": 0,
|
519 |
+
"mmlu_high_school_chemistry": 0,
|
520 |
+
"mmlu_high_school_computer_science": 0,
|
521 |
+
"mmlu_high_school_european_history": 0,
|
522 |
+
"mmlu_high_school_geography": 0,
|
523 |
+
"mmlu_high_school_government_and_politics": 0,
|
524 |
+
"mmlu_high_school_macroeconomics": 0,
|
525 |
+
"mmlu_high_school_mathematics": 0,
|
526 |
+
"mmlu_high_school_microeconomics": 0,
|
527 |
+
"mmlu_high_school_physics": 0,
|
528 |
+
"mmlu_high_school_psychology": 0,
|
529 |
+
"mmlu_high_school_statistics": 0,
|
530 |
+
"mmlu_high_school_us_history": 0,
|
531 |
+
"mmlu_high_school_world_history": 0,
|
532 |
+
"mmlu_human_aging": 0,
|
533 |
+
"mmlu_human_sexuality": 0,
|
534 |
+
"mmlu_humanities": 0,
|
535 |
+
"mmlu_international_law": 0,
|
536 |
+
"mmlu_jurisprudence": 0,
|
537 |
+
"mmlu_logical_fallacies": 0,
|
538 |
+
"mmlu_machine_learning": 0,
|
539 |
+
"mmlu_management": 0,
|
540 |
+
"mmlu_marketing": 0,
|
541 |
+
"mmlu_medical_genetics": 0,
|
542 |
+
"mmlu_miscellaneous": 0,
|
543 |
+
"mmlu_moral_disputes": 0,
|
544 |
+
"mmlu_moral_scenarios": 0,
|
545 |
+
"mmlu_nutrition": 0,
|
546 |
+
"mmlu_other": 0,
|
547 |
+
"mmlu_philosophy": 0,
|
548 |
+
"mmlu_prehistory": 0,
|
549 |
+
"mmlu_professional_accounting": 0,
|
550 |
+
"mmlu_professional_law": 0,
|
551 |
+
"mmlu_professional_medicine": 0,
|
552 |
+
"mmlu_professional_psychology": 0,
|
553 |
+
"mmlu_public_relations": 0,
|
554 |
+
"mmlu_security_studies": 0,
|
555 |
+
"mmlu_social_sciences": 0,
|
556 |
+
"mmlu_sociology": 0,
|
557 |
+
"mmlu_stem": 0,
|
558 |
+
"mmlu_us_foreign_policy": 0,
|
559 |
+
"mmlu_virology": 0,
|
560 |
+
"mmlu_world_religions": 0,
|
561 |
+
"openbookqa": 0,
|
562 |
+
"piqa": 0,
|
563 |
+
"truthfulqa_mc1": 0,
|
564 |
+
"truthfulqa_mc2": 0,
|
565 |
+
"winogrande": 0
|
566 |
+
},
|
567 |
+
"date": 1716562293.095405,
|
568 |
+
"config": {
|
569 |
+
"model": "WrapperGGUFLM",
|
570 |
+
"model_args": "gguf_model=QuantFactory/Phi-3-mini-4k-instruct-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
|
571 |
+
"batch_size": 1,
|
572 |
+
"batch_sizes": [],
|
573 |
+
"device": "cuda",
|
574 |
+
"use_cache": null,
|
575 |
+
"limit": null,
|
576 |
+
"bootstrap_iters": 100000,
|
577 |
+
"gen_kwargs": null
|
578 |
+
}
|
579 |
+
}
|
Qwen/results_2024-05-19-02-59-07_Qwen1.5-7B-Chat.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Qwen/Qwen1.5-7B-Chat",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 15.44,
+        "model_params": 7.72,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|boolq|0": {
+            "acc,none": 0.8385321100917431,
+            "acc_stderr,none": 0.006435695291658146,
+            "alias": "boolq"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5876319458275244,
+            "acc_stderr,none": 0.004912547040132872,
+            "acc_norm,none": 0.7700657239593707,
+            "acc_norm_stderr,none": 0.004199303568134895,
+            "alias": "hellaswag"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7535364526659413,
+            "acc_stderr,none": 0.010054810789671825,
+            "acc_norm,none": 0.7600652883569097,
+            "acc_norm_stderr,none": 0.009963625892809544,
+            "alias": "piqa"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 5.447365580615996,
+            "perplexity_stderr,none": 0.1829804797972089,
+            "acc,none": 0.609353774500291,
+            "acc_stderr,none": 0.006797334493142832,
+            "alias": "lambada_openai"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.6847643097643098,
+            "acc_stderr,none": 0.009533589368505844,
+            "acc_norm,none": 0.6334175084175084,
+            "acc_norm_stderr,none": 0.009887786585323955,
+            "alias": "arc_easy"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.43430034129692835,
+            "acc_stderr,none": 0.014484703048857359,
+            "acc_norm,none": 0.4513651877133106,
+            "acc_norm_stderr,none": 0.014542104569955262,
+            "alias": "arc_challenge"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6009115510611024,
+            "acc_stderr,none": 0.0039547457776823065,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5509032943676939,
+            "acc_stderr,none": 0.006910815710445093
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.42063492063492064,
+            "acc_stderr,none": 0.04415438226743744
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7515151515151515,
+            "acc_stderr,none": 0.033744026441394036
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7745098039215687,
+            "acc_stderr,none": 0.02933116229425173
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.759493670886076,
+            "acc_stderr,none": 0.027820781981149678
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.71900826446281,
+            "acc_stderr,none": 0.04103203830514512
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7777777777777778,
+            "acc_stderr,none": 0.040191074725573483
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6687116564417178,
+            "acc_stderr,none": 0.03697983910025588
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6705202312138728,
+            "acc_stderr,none": 0.025305258131879716
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.36983240223463687,
+            "acc_stderr,none": 0.01614588125605621
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6881028938906752,
+            "acc_stderr,none": 0.02631185807185416
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6666666666666666,
+            "acc_stderr,none": 0.026229649178821157
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.44132985658409385,
+            "acc_stderr,none": 0.012682016335646666
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7426900584795322,
+            "acc_stderr,none": 0.03352799844161865
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6649501126488574,
+            "acc_stderr,none": 0.008181942013523022
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6792452830188679,
+            "acc_stderr,none": 0.028727502957880263
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5838150289017341,
+            "acc_stderr,none": 0.03758517775404948
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709391
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6367713004484304,
+            "acc_stderr,none": 0.032277904428505
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7669902912621359,
+            "acc_stderr,none": 0.04185832598928315
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8589743589743589,
+            "acc_stderr,none": 0.022801382534597518
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.68,
+            "acc_stderr,none": 0.046882617226215034
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7701149425287356,
+            "acc_stderr,none": 0.015046301846691826
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6797385620915033,
+            "acc_stderr,none": 0.026716118380156847
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.42907801418439717,
+            "acc_stderr,none": 0.029525914302558555
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6544117647058824,
+            "acc_stderr,none": 0.02888819310398864
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5060240963855421,
+            "acc_stderr,none": 0.03892212195333045
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.6837829054273643,
+            "acc_stderr,none": 0.008207129729378173
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.5263157894736842,
+            "acc_stderr,none": 0.046970851366478626
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7727272727272727,
+            "acc_stderr,none": 0.0298575156733864
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7927461139896373,
+            "acc_stderr,none": 0.02925282329180363
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5974358974358974,
+            "acc_stderr,none": 0.024864995159767752
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6428571428571429,
+            "acc_stderr,none": 0.031124619309328177
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8073394495412844,
+            "acc_stderr,none": 0.016909276884936084
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6870229007633588,
+            "acc_stderr,none": 0.04066962905677697
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5669934640522876,
+            "acc_stderr,none": 0.020045442473324227
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6454545454545455,
+            "acc_stderr,none": 0.04582004841505417
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6816326530612244,
+            "acc_stderr,none": 0.029822533793982066
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7810945273631841,
+            "acc_stderr,none": 0.029239174636647
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.8,
+            "acc_stderr,none": 0.04020151261036845
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.531557247066286,
+            "acc_stderr,none": 0.008642992258979928
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.049888765156985884
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5481481481481482,
+            "acc_stderr,none": 0.04299268905480864
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6578947368421053,
+            "acc_stderr,none": 0.038607315993160904
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6319444444444444,
+            "acc_stderr,none": 0.04032999053960719
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.43,
+            "acc_stderr,none": 0.049756985195624284
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.38235294117647056,
+            "acc_stderr,none": 0.04835503696107224
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.76,
+            "acc_stderr,none": 0.04292346959909282
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5531914893617021,
+            "acc_stderr,none": 0.0325005368436584
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5793103448275863,
+            "acc_stderr,none": 0.0411391498118926
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.47354497354497355,
+            "acc_stderr,none": 0.025715239811346755
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7193548387096774,
+            "acc_stderr,none": 0.025560604721022895
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.5517241379310345,
+            "acc_stderr,none": 0.03499113137676744
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.04512608598542128
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.35555555555555557,
+            "acc_stderr,none": 0.029185714949857406
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.37748344370860926,
+            "acc_stderr,none": 0.0395802723112157
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5277777777777778,
+            "acc_stderr,none": 0.0340470532865388
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.4017857142857143,
+            "acc_stderr,none": 0.04653333146973646
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5354003964835502,
+            "acc_stderr,none": 0.01598976897007417,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.6527229676400947,
+            "acc_stderr,none": 0.013380909249751242,
+            "alias": "winogrande"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.324,
+            "acc_stderr,none": 0.020950557312477455,
+            "acc_norm,none": 0.426,
+            "acc_norm_stderr,none": 0.022136577335085637,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.36107711138310894,
+            "acc_stderr,none": 0.016814312844836886,
+            "alias": "truthfulqa_mc1"
+        }
+    },
+    "task_info": {
+        "model": "Qwen/Qwen1.5-7B-Chat",
+        "revision": "main",
+        "private": false,
+        "params": 7.72,
+        "architectures": "Qwen2ForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 7.72,
+        "model_size": 15.44,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|boolq|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716055033.4916809,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Qwen/Qwen1.5-7B-Chat,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
Qwen/results_2024-05-19-03-23-26_Qwen1.5-0.5B-Chat.json
ADDED
@@ -0,0 +1,576 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": "-1",
|
9 |
+
"start_time": "",
|
10 |
+
"end_time": "",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "Qwen/Qwen1.5-0.5B-Chat",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "16bitt",
|
15 |
+
"model_size": 1.24,
|
16 |
+
"model_params": 0.62,
|
17 |
+
"quant_type": null,
|
18 |
+
"precision": "16bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|truthfulqa:mc1|0": {
|
22 |
+
"acc,none": 0.2582619339045288,
|
23 |
+
"acc_stderr,none": 0.01532182168847618,
|
24 |
+
"alias": "truthfulqa_mc1"
|
25 |
+
},
|
26 |
+
"harness|piqa|0": {
|
27 |
+
"acc,none": 0.6730141458106638,
|
28 |
+
"acc_stderr,none": 0.01094515712697823,
|
29 |
+
"acc_norm,none": 0.6659412404787813,
|
30 |
+
"acc_norm_stderr,none": 0.011004613886336738,
|
31 |
+
"alias": "piqa"
|
32 |
+
},
|
33 |
+
"harness|truthfulqa:mc2|0": {
|
34 |
+
"acc,none": 0.42945731897141587,
|
35 |
+
"acc_stderr,none": 0.01511570497748254,
|
36 |
+
"alias": "truthfulqa_mc2"
|
37 |
+
},
|
38 |
+
"harness|arc:easy|0": {
|
39 |
+
"acc,none": 0.48484848484848486,
|
40 |
+
"acc_stderr,none": 0.010255071794531506,
|
41 |
+
"acc_norm,none": 0.42845117845117847,
|
42 |
+
"acc_norm_stderr,none": 0.010154195733990967,
|
43 |
+
"alias": "arc_easy"
|
44 |
+
},
|
45 |
+
"harness|lambada:openai|0": {
|
46 |
+
"perplexity,none": 28.21724128185796,
|
47 |
+
"perplexity_stderr,none": 1.3671762617467076,
|
48 |
+
"acc,none": 0.4137395691830002,
|
49 |
+
"acc_stderr,none": 0.006861528841487098,
|
50 |
+
"alias": "lambada_openai"
|
51 |
+
},
|
52 |
+
"harness|winogrande|0": {
|
53 |
+
"acc,none": 0.5509076558800315,
|
54 |
+
"acc_stderr,none": 0.01397945938914084,
|
55 |
+
"alias": "winogrande"
|
56 |
+
},
|
57 |
+
"harness|arc:challenge|0": {
|
58 |
+
"acc,none": 0.24914675767918087,
|
59 |
+
"acc_stderr,none": 0.012639407111926437,
|
60 |
+
"acc_norm,none": 0.295221843003413,
|
61 |
+
"acc_norm_stderr,none": 0.013329750293382316,
|
62 |
+
"alias": "arc_challenge"
|
63 |
+
},
|
64 |
+
"harness|openbookqa|0": {
|
65 |
+
"acc,none": 0.188,
|
66 |
+
"acc_stderr,none": 0.017490678880346264,
|
67 |
+
"acc_norm,none": 0.312,
|
68 |
+
"acc_norm_stderr,none": 0.020740596536488087,
|
69 |
+
"alias": "openbookqa"
|
70 |
+
},
|
71 |
+
"harness|hellaswag|0": {
|
72 |
+
"acc,none": 0.36297550288787095,
|
73 |
+
"acc_stderr,none": 0.004798751281560845,
|
74 |
+
"acc_norm,none": 0.44971121290579563,
|
75 |
+
"acc_norm_stderr,none": 0.004964479324552527,
|
76 |
+
"alias": "hellaswag"
|
77 |
+
},
|
78 |
+
"harness|boolq|0": {
|
79 |
+
"acc,none": 0.39938837920489295,
|
80 |
+
"acc_stderr,none": 0.008566178448007835,
|
81 |
+
"alias": "boolq"
|
82 |
+
},
|
83 |
+
"harness|mmlu|0": {
|
84 |
+
"acc,none": 0.31619427431989744,
|
85 |
+
"acc_stderr,none": 0.003877791398156741,
|
86 |
+
"alias": "mmlu"
|
87 |
+
},
|
88 |
+
"harness|mmlu_humanities|0": {
|
89 |
+
"alias": " - humanities",
|
90 |
+
"acc,none": 0.3273113708820404,
|
91 |
+
"acc_stderr,none": 0.006758326816600935
|
92 |
+
},
|
93 |
+
"harness|mmlu_formal_logic|0": {
|
94 |
+
"alias": " - formal_logic",
|
95 |
+
"acc,none": 0.3253968253968254,
|
96 |
+
"acc_stderr,none": 0.04190596438871137
|
97 |
+
},
|
98 |
+
"harness|mmlu_high_school_european_history|0": {
|
99 |
+
"alias": " - high_school_european_history",
|
100 |
+
"acc,none": 0.503030303030303,
|
101 |
+
"acc_stderr,none": 0.03904272341431857
|
102 |
+
},
|
103 |
+
"harness|mmlu_high_school_us_history|0": {
|
104 |
+
"alias": " - high_school_us_history",
|
105 |
+
"acc,none": 0.37745098039215685,
|
106 |
+
"acc_stderr,none": 0.03402272044340703
|
107 |
+
},
|
108 |
+
"harness|mmlu_high_school_world_history|0": {
|
109 |
+
"alias": " - high_school_world_history",
|
110 |
+
"acc,none": 0.4978902953586498,
|
111 |
+
"acc_stderr,none": 0.032546938018020076
|
112 |
+
},
|
113 |
+
"harness|mmlu_international_law|0": {
|
114 |
+
"alias": " - international_law",
|
115 |
+
"acc,none": 0.5041322314049587,
|
116 |
+
"acc_stderr,none": 0.04564198767432754
|
117 |
+
},
|
118 |
+
"harness|mmlu_jurisprudence|0": {
|
119 |
+
"alias": " - jurisprudence",
|
120 |
+
"acc,none": 0.37962962962962965,
|
121 |
+
"acc_stderr,none": 0.04691521224077742
|
122 |
+
},
|
123 |
+
"harness|mmlu_logical_fallacies|0": {
|
124 |
+
"alias": " - logical_fallacies",
|
125 |
+
"acc,none": 0.36809815950920244,
|
126 |
+
"acc_stderr,none": 0.03789213935838396
|
127 |
+
},
|
128 |
+
"harness|mmlu_moral_disputes|0": {
|
129 |
+
"alias": " - moral_disputes",
|
130 |
+
"acc,none": 0.3670520231213873,
|
131 |
+
"acc_stderr,none": 0.02595005433765408
|
132 |
+
},
|
133 |
+
"harness|mmlu_moral_scenarios|0": {
|
134 |
+
"alias": " - moral_scenarios",
|
135 |
+
"acc,none": 0.23798882681564246,
|
136 |
+
"acc_stderr,none": 0.014242630070574885
|
137 |
+
},
|
138 |
+
"harness|mmlu_philosophy|0": {
|
139 |
+
"alias": " - philosophy",
|
140 |
+
"acc,none": 0.36012861736334406,
|
141 |
+
"acc_stderr,none": 0.02726429759980401
|
142 |
+
},
|
143 |
+
"harness|mmlu_prehistory|0": {
|
144 |
+
"alias": " - prehistory",
|
145 |
+
"acc,none": 0.3487654320987654,
|
146 |
+
"acc_stderr,none": 0.02651759772446501
|
147 |
+
},
|
148 |
+
"harness|mmlu_professional_law|0": {
|
149 |
+
"alias": " - professional_law",
|
150 |
+
"acc,none": 0.2861799217731421,
|
151 |
+
"acc_stderr,none": 0.011543642878150757
|
152 |
+
},
|
153 |
+
"harness|mmlu_world_religions|0": {
|
154 |
+
"alias": " - world_religions",
|
155 |
+
"acc,none": 0.3216374269005848,
|
156 |
+
"acc_stderr,none": 0.03582529442573122
|
157 |
+
},
|
158 |
+
"harness|mmlu_other|0": {
|
159 |
+
"alias": " - other",
|
160 |
+
"acc,none": 0.34921145799806885,
|
161 |
+
"acc_stderr,none": 0.008445170092725407
|
162 |
+
},
|
163 |
+
"harness|mmlu_business_ethics|0": {
|
164 |
+
"alias": " - business_ethics",
|
165 |
+
"acc,none": 0.38,
|
166 |
+
"acc_stderr,none": 0.04878317312145632
|
167 |
+
},
|
168 |
+
"harness|mmlu_clinical_knowledge|0": {
|
169 |
+
"alias": " - clinical_knowledge",
|
170 |
+
"acc,none": 0.2943396226415094,
|
171 |
+
"acc_stderr,none": 0.02804918631569525
|
172 |
+
},
|
173 |
+
"harness|mmlu_college_medicine|0": {
|
174 |
+
"alias": " - college_medicine",
|
175 |
+
"acc,none": 0.3236994219653179,
|
176 |
+
"acc_stderr,none": 0.0356760379963917
|
177 |
+
},
|
178 |
+
"harness|mmlu_global_facts|0": {
|
179 |
+
"alias": " - global_facts",
|
180 |
+
"acc,none": 0.24,
|
181 |
+
"acc_stderr,none": 0.04292346959909282
|
182 |
+
},
|
183 |
+
"harness|mmlu_human_aging|0": {
|
184 |
+
"alias": " - human_aging",
|
185 |
+
"acc,none": 0.40358744394618834,
|
186 |
+
"acc_stderr,none": 0.03292802819330313
|
187 |
+
},
|
188 |
+
"harness|mmlu_management|0": {
|
189 |
+
"alias": " - management",
|
190 |
+
"acc,none": 0.4174757281553398,
|
191 |
+
"acc_stderr,none": 0.04882840548212238
|
192 |
+
},
|
193 |
+
"harness|mmlu_marketing|0": {
|
194 |
+
"alias": " - marketing",
|
195 |
+
"acc,none": 0.4658119658119658,
|
196 |
+
"acc_stderr,none": 0.03267942734081228
|
197 |
+
},
|
198 |
+
"harness|mmlu_medical_genetics|0": {
|
199 |
+
"alias": " - medical_genetics",
|
200 |
+
"acc,none": 0.41,
|
201 |
+
"acc_stderr,none": 0.049431107042371025
|
202 |
+
},
|
203 |
+
"harness|mmlu_miscellaneous|0": {
|
204 |
+
"alias": " - miscellaneous",
|
205 |
+
"acc,none": 0.40229885057471265,
|
206 |
+
"acc_stderr,none": 0.017535294529068955
|
207 |
+
},
|
208 |
+
"harness|mmlu_nutrition|0": {
|
209 |
+
"alias": " - nutrition",
|
210 |
+
"acc,none": 0.3954248366013072,
|
211 |
+
"acc_stderr,none": 0.027996723180631455
|
212 |
+
},
|
213 |
+
"harness|mmlu_professional_accounting|0": {
|
214 |
+
"alias": " - professional_accounting",
|
215 |
+
"acc,none": 0.2553191489361702,
|
216 |
+
"acc_stderr,none": 0.026011992930902013
|
217 |
+
},
|
218 |
+
"harness|mmlu_professional_medicine|0": {
|
219 |
+
"alias": " - professional_medicine",
|
220 |
+
"acc,none": 0.1801470588235294,
|
221 |
+
"acc_stderr,none": 0.023345163616544866
|
222 |
+
},
|
223 |
+
"harness|mmlu_virology|0": {
|
224 |
+
"alias": " - virology",
|
225 |
+
"acc,none": 0.29518072289156627,
|
226 |
+
"acc_stderr,none": 0.0355092018568963
|
227 |
+
},
|
228 |
+
"harness|mmlu_social_sciences|0": {
|
229 |
+
"alias": " - social_sciences",
|
230 |
+
"acc,none": 0.31069223269418267,
|
231 |
+
"acc_stderr,none": 0.008278657494296564
|
232 |
+
},
|
233 |
+
"harness|mmlu_econometrics|0": {
|
234 |
+
"alias": " - econometrics",
|
235 |
+
"acc,none": 0.22807017543859648,
|
236 |
+
"acc_stderr,none": 0.03947152782669415
|
237 |
+
},
|
238 |
+
"harness|mmlu_high_school_geography|0": {
|
239 |
+
"alias": " - high_school_geography",
|
240 |
+
"acc,none": 0.3181818181818182,
|
241 |
+
"acc_stderr,none": 0.03318477333845331
|
242 |
+
},
|
243 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
244 |
+
"alias": " - high_school_government_and_politics",
|
245 |
+
"acc,none": 0.27979274611398963,
|
246 |
+
"acc_stderr,none": 0.032396370467357015
|
247 |
+
},
|
248 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
249 |
+
"alias": " - high_school_macroeconomics",
|
250 |
+
"acc,none": 0.2641025641025641,
|
251 |
+
"acc_stderr,none": 0.022352193737453285
|
252 |
+
},
|
253 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
254 |
+
"alias": " - high_school_microeconomics",
|
255 |
+
"acc,none": 0.25210084033613445,
|
256 |
+
"acc_stderr,none": 0.028205545033277723
|
257 |
+
},
|
258 |
+
"harness|mmlu_high_school_psychology|0": {
|
259 |
+
"alias": " - high_school_psychology",
|
260 |
+
"acc,none": 0.3394495412844037,
|
261 |
+
"acc_stderr,none": 0.02030210934266235
|
262 |
+
},
|
263 |
+
"harness|mmlu_human_sexuality|0": {
|
264 |
+
"alias": " - human_sexuality",
|
265 |
+
"acc,none": 0.4351145038167939,
|
266 |
+
"acc_stderr,none": 0.04348208051644858
|
267 |
+
},
|
268 |
+
"harness|mmlu_professional_psychology|0": {
|
269 |
+
"alias": " - professional_psychology",
|
270 |
+
"acc,none": 0.31209150326797386,
|
271 |
+
"acc_stderr,none": 0.01874501120127766
|
272 |
+
},
|
273 |
+
"harness|mmlu_public_relations|0": {
|
274 |
+
"alias": " - public_relations",
|
275 |
+
"acc,none": 0.33636363636363636,
|
276 |
+
"acc_stderr,none": 0.04525393596302506
|
277 |
+
},
|
278 |
+
"harness|mmlu_security_studies|0": {
|
279 |
+
"alias": " - security_studies",
|
280 |
+
"acc,none": 0.20408163265306123,
|
281 |
+
"acc_stderr,none": 0.02580128347509051
|
282 |
+
},
|
283 |
+
"harness|mmlu_sociology|0": {
|
284 |
+
"alias": " - sociology",
|
285 |
+
"acc,none": 0.4129353233830846,
|
286 |
+
"acc_stderr,none": 0.03481520803367348
|
287 |
+
},
|
288 |
+
"harness|mmlu_us_foreign_policy|0": {
|
289 |
+
"alias": " - us_foreign_policy",
|
290 |
+
"acc,none": 0.47,
|
291 |
+
"acc_stderr,none": 0.05016135580465919
|
292 |
+
},
|
293 |
+
"harness|mmlu_stem|0": {
|
294 |
+
"alias": " - stem",
|
295 |
+
"acc,none": 0.2724389470345703,
|
296 |
+
"acc_stderr,none": 0.007874959601829497
|
297 |
+
},
|
298 |
+
"harness|mmlu_abstract_algebra|0": {
|
299 |
+
"alias": " - abstract_algebra",
|
300 |
+
"acc,none": 0.26,
|
301 |
+
"acc_stderr,none": 0.04408440022768079
|
302 |
+
},
|
303 |
+
"harness|mmlu_anatomy|0": {
|
304 |
+
"alias": " - anatomy",
|
305 |
+
"acc,none": 0.3333333333333333,
|
306 |
+
"acc_stderr,none": 0.04072314811876837
|
307 |
+
},
|
308 |
+
"harness|mmlu_astronomy|0": {
|
309 |
+
"alias": " - astronomy",
|
310 |
+
"acc,none": 0.29605263157894735,
|
311 |
+
"acc_stderr,none": 0.03715062154998905
|
312 |
+
},
|
313 |
+
"harness|mmlu_college_biology|0": {
|
314 |
+
"alias": " - college_biology",
|
315 |
+
"acc,none": 0.2986111111111111,
|
316 |
+
"acc_stderr,none": 0.03827052357950756
|
317 |
+
},
|
318 |
+
"harness|mmlu_college_chemistry|0": {
|
319 |
+
"alias": " - college_chemistry",
|
320 |
+
"acc,none": 0.22,
|
321 |
+
"acc_stderr,none": 0.041633319989322695
|
322 |
+
},
|
323 |
+
"harness|mmlu_college_computer_science|0": {
|
324 |
+
"alias": " - college_computer_science",
|
325 |
+
"acc,none": 0.4,
|
326 |
+
"acc_stderr,none": 0.04923659639173309
|
327 |
+
},
|
328 |
+
"harness|mmlu_college_mathematics|0": {
|
329 |
+
"alias": " - college_mathematics",
|
330 |
+
"acc,none": 0.29,
|
331 |
+
"acc_stderr,none": 0.045604802157206845
|
332 |
+
},
|
333 |
+
"harness|mmlu_college_physics|0": {
|
334 |
+
"alias": " - college_physics",
|
335 |
+
"acc,none": 0.27450980392156865,
|
336 |
+
"acc_stderr,none": 0.044405219061793275
|
337 |
+
},
|
338 |
+
"harness|mmlu_computer_security|0": {
|
339 |
+
"alias": " - computer_security",
|
340 |
+
"acc,none": 0.38,
|
341 |
+
"acc_stderr,none": 0.04878317312145633
|
342 |
+
},
|
343 |
+
"harness|mmlu_conceptual_physics|0": {
|
344 |
+
"alias": " - conceptual_physics",
|
345 |
+
"acc,none": 0.28936170212765955,
|
346 |
+
"acc_stderr,none": 0.02964400657700962
|
347 |
+
},
|
348 |
+
"harness|mmlu_electrical_engineering|0": {
|
349 |
+
"alias": " - electrical_engineering",
|
350 |
+
"acc,none": 0.35172413793103446,
|
351 |
+
"acc_stderr,none": 0.0397923663749741
|
352 |
+
},
|
353 |
+
"harness|mmlu_elementary_mathematics|0": {
|
354 |
+
"alias": " - elementary_mathematics",
|
355 |
+
"acc,none": 0.21693121693121692,
|
356 |
+
"acc_stderr,none": 0.02122708244944506
|
357 |
+
},
|
358 |
+
"harness|mmlu_high_school_biology|0": {
|
359 |
+
"alias": " - high_school_biology",
|
360 |
+
"acc,none": 0.3258064516129032,
|
361 |
+
"acc_stderr,none": 0.026662010578567104
|
362 |
+
},
|
363 |
+
"harness|mmlu_high_school_chemistry|0": {
|
364 |
+
"alias": " - high_school_chemistry",
|
365 |
+
"acc,none": 0.21674876847290642,
|
366 |
+
"acc_stderr,none": 0.02899033125251624
|
367 |
+
},
|
368 |
+
"harness|mmlu_high_school_computer_science|0": {
|
369 |
+
"alias": " - high_school_computer_science",
|
370 |
+
"acc,none": 0.36,
|
371 |
+
"acc_stderr,none": 0.048241815132442176
|
372 |
+
},
|
373 |
+
"harness|mmlu_high_school_mathematics|0": {
|
374 |
+
"alias": " - high_school_mathematics",
|
375 |
+
"acc,none": 0.22962962962962963,
|
376 |
+
"acc_stderr,none": 0.025644108639267624
|
377 |
+
},
|
378 |
+
"harness|mmlu_high_school_physics|0": {
|
379 |
+
"alias": " - high_school_physics",
|
380 |
+
"acc,none": 0.2119205298013245,
|
381 |
+
"acc_stderr,none": 0.033367670865679766
|
382 |
+
},
|
383 |
+
"harness|mmlu_high_school_statistics|0": {
|
384 |
+
"alias": " - high_school_statistics",
|
385 |
+
"acc,none": 0.1574074074074074,
|
386 |
+
"acc_stderr,none": 0.024837173518242384
|
387 |
+
},
|
388 |
+
"harness|mmlu_machine_learning|0": {
|
389 |
+
"alias": " - machine_learning",
|
390 |
+
"acc,none": 0.29464285714285715,
|
391 |
+
"acc_stderr,none": 0.0432704093257873
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Qwen/Qwen1.5-0.5B-Chat",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 0.62,
|
399 |
+
"architectures": "Qwen2ForCausalLM",
|
400 |
+
"quant_type": null,
|
401 |
+
"precision": "16bit",
|
402 |
+
"model_params": 0.62,
|
403 |
+
"model_size": 1.24,
|
404 |
+
"weight_dtype": "bfloat16",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-27T08:04:58Z",
|
410 |
+
"model_type": "original",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": null,
|
416 |
+
"versions": {
|
417 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
418 |
+
"harness|piqa|0": 1.0,
|
419 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
420 |
+
"harness|arc:easy|0": 1.0,
|
421 |
+
"harness|lambada:openai|0": 1.0,
|
422 |
+
"harness|winogrande|0": 1.0,
|
423 |
+
"harness|arc:challenge|0": 1.0,
|
424 |
+
"harness|openbookqa|0": 1.0,
|
425 |
+
"harness|hellaswag|0": 1.0,
|
426 |
+
"harness|boolq|0": 2.0,
|
427 |
+
"harness|mmlu|0": null,
|
428 |
+
"harness|mmlu_humanities|0": null,
|
429 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
430 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
431 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
432 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
433 |
+
"harness|mmlu_international_law|0": 0.0,
|
434 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
435 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
436 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
437 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
438 |
+
"harness|mmlu_philosophy|0": 0.0,
|
439 |
+
"harness|mmlu_prehistory|0": 0.0,
|
440 |
+
"harness|mmlu_professional_law|0": 0.0,
|
441 |
+
"harness|mmlu_world_religions|0": 0.0,
|
442 |
+
"harness|mmlu_other|0": null,
|
443 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
444 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
445 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
446 |
+
"harness|mmlu_global_facts|0": 0.0,
|
447 |
+
"harness|mmlu_human_aging|0": 0.0,
|
448 |
+
"harness|mmlu_management|0": 0.0,
|
449 |
+
"harness|mmlu_marketing|0": 0.0,
|
450 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
451 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
452 |
+
"harness|mmlu_nutrition|0": 0.0,
|
453 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
454 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
455 |
+
"harness|mmlu_virology|0": 0.0,
|
456 |
+
"harness|mmlu_social_sciences|0": null,
|
457 |
+
"harness|mmlu_econometrics|0": 0.0,
|
458 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
459 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
460 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
461 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
462 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
463 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
464 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
465 |
+
"harness|mmlu_public_relations|0": 0.0,
|
466 |
+
"harness|mmlu_security_studies|0": 0.0,
|
467 |
+
"harness|mmlu_sociology|0": 0.0,
|
468 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
469 |
+
"harness|mmlu_stem|0": null,
|
470 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
471 |
+
"harness|mmlu_anatomy|0": 0.0,
|
472 |
+
"harness|mmlu_astronomy|0": 0.0,
|
473 |
+
"harness|mmlu_college_biology|0": 0.0,
|
474 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
475 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
476 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
477 |
+
"harness|mmlu_college_physics|0": 0.0,
|
478 |
+
"harness|mmlu_computer_security|0": 0.0,
|
479 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
480 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
481 |
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716058768.9074655,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Qwen/Qwen1.5-0.5B-Chat,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
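Each results file in this commit follows the same schema: "config_general" (model metadata), "results" (per-task metrics keyed like "harness|mmlu|0", with metric names such as "acc,none" and "acc_stderr,none"), "task_info", "versions", "n-shot", and the run "config". A minimal sketch of how such a file could be inspected locally, assuming it has been saved as "results.json" (a hypothetical path):

import json

# Minimal sketch: load one of the result files from this commit, saved
# locally as "results.json" (hypothetical path), and print its headline
# zero-shot accuracies with their standard errors.
with open("results.json") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])
for task, metrics in data["results"].items():
    if "acc,none" in metrics:  # every task in these files reports accuracy
        stderr = metrics.get("acc_stderr,none", float("nan"))
        print(f"{task}: acc={metrics['acc,none']:.4f} +/- {stderr:.4f}")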
baichuan-inc/results_2024-05-19-01-02-50_Baichuan2-13B-Chat.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "baichuan-inc/Baichuan2-13B-Chat",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 26.0,
+        "model_params": 13.0,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|arc:challenge|0": {
+            "acc,none": 0.47440273037542663,
+            "acc_stderr,none": 0.014592230885298964,
+            "acc_norm,none": 0.4761092150170648,
+            "acc_norm_stderr,none": 0.014594701798071654,
+            "alias": "arc_challenge"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5778729336785501,
+            "acc_stderr,none": 0.004928891895874297,
+            "acc_norm,none": 0.7600079665405298,
+            "acc_norm_stderr,none": 0.00426205452657707,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7198105761641673,
+            "acc_stderr,none": 0.0126217079797985,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.189163879397028,
+            "perplexity_stderr,none": 0.08034962990833386,
+            "acc,none": 0.7141470987774112,
+            "acc_stderr,none": 0.00629473154335249,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5032627320436495,
+            "acc_stderr,none": 0.01573739173365504,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7512626262626263,
+            "acc_stderr,none": 0.008870224411653797,
+            "acc_norm,none": 0.7007575757575758,
+            "acc_norm_stderr,none": 0.009396447162309824,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3598531211750306,
+            "acc_stderr,none": 0.016801860466677157,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5592508189716564,
+            "acc_stderr,none": 0.003953538449426825,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5071200850159405,
+            "acc_stderr,none": 0.0068319935524088216
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.3888888888888889,
+            "acc_stderr,none": 0.04360314860077459
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7393939393939394,
+            "acc_stderr,none": 0.034277431758165236
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7549019607843137,
+            "acc_stderr,none": 0.03019028245350195
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7383966244725738,
+            "acc_stderr,none": 0.028609516716994934
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.6942148760330579,
+            "acc_stderr,none": 0.04205953933884124
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6944444444444444,
+            "acc_stderr,none": 0.044531975073749834
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6993865030674846,
+            "acc_stderr,none": 0.03602511318806771
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6069364161849711,
+            "acc_stderr,none": 0.026296227915613674
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.24692737430167597,
+            "acc_stderr,none": 0.014422292204808838
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.617363344051447,
+            "acc_stderr,none": 0.027604689028581986
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6419753086419753,
+            "acc_stderr,none": 0.026675611926037082
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.424380704041721,
+            "acc_stderr,none": 0.012623343757430015
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7660818713450293,
+            "acc_stderr,none": 0.03246721765117826
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6443514644351465,
+            "acc_stderr,none": 0.008297767909816914
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001974
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.630188679245283,
+            "acc_stderr,none": 0.029711421880107933
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5549132947976878,
+            "acc_stderr,none": 0.03789401760283647
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.38,
+            "acc_stderr,none": 0.04878317312145633
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6457399103139013,
+            "acc_stderr,none": 0.032100621541349864
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7669902912621359,
+            "acc_stderr,none": 0.04185832598928315
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8290598290598291,
+            "acc_stderr,none": 0.02466249684520981
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.65,
+            "acc_stderr,none": 0.047937248544110196
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7790549169859514,
+            "acc_stderr,none": 0.014836205167333569
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6209150326797386,
+            "acc_stderr,none": 0.02778014120702334
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.45390070921985815,
+            "acc_stderr,none": 0.02970045324729148
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.5661764705882353,
+            "acc_stderr,none": 0.03010563657001663
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4578313253012048,
+            "acc_stderr,none": 0.0387862677100236
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.6480337991550211,
+            "acc_stderr,none": 0.008358270030925742
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.40350877192982454,
+            "acc_stderr,none": 0.04615186962583703
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7272727272727273,
+            "acc_stderr,none": 0.03173071239071724
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8186528497409327,
+            "acc_stderr,none": 0.02780703236068609
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5358974358974359,
+            "acc_stderr,none": 0.025285585990017848
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.542016806722689,
+            "acc_stderr,none": 0.032363611119519416
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.763302752293578,
+            "acc_stderr,none": 0.018224078117299085
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6946564885496184,
+            "acc_stderr,none": 0.04039314978724561
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5506535947712419,
+            "acc_stderr,none": 0.020123766528027262
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.5727272727272728,
+            "acc_stderr,none": 0.04738198703545483
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6571428571428571,
+            "acc_stderr,none": 0.030387262919547728
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7562189054726368,
+            "acc_stderr,none": 0.03036049015401465
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.88,
+            "acc_stderr,none": 0.032659863237109066
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.46653980336187756,
+            "acc_stderr,none": 0.008525630267008885
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.045604802157206845
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5259259259259259,
+            "acc_stderr,none": 0.04313531696750575
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6118421052631579,
+            "acc_stderr,none": 0.03965842097512744
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6736111111111112,
+            "acc_stderr,none": 0.03921067198982266
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.41,
+            "acc_stderr,none": 0.04943110704237102
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.46,
+            "acc_stderr,none": 0.05009082659620333
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.04852365870939099
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.4215686274509804,
+            "acc_stderr,none": 0.04913595201274498
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.045126085985421296
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.4808510638297872,
+            "acc_stderr,none": 0.032662042990646775
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.503448275862069,
+            "acc_stderr,none": 0.04166567577101579
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.3783068783068783,
+            "acc_stderr,none": 0.024976954053155254
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7354838709677419,
+            "acc_stderr,none": 0.02509189237885928
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4433497536945813,
+            "acc_stderr,none": 0.03495334582162933
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.25555555555555554,
+            "acc_stderr,none": 0.026593939101844065
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.33112582781456956,
+            "acc_stderr,none": 0.038425817186598696
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.37037037037037035,
+            "acc_stderr,none": 0.03293377139415191
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.3392857142857143,
+            "acc_stderr,none": 0.044939490686135404
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.308,
+            "acc_stderr,none": 0.020667032987466104,
+            "acc_norm,none": 0.43,
+            "acc_norm_stderr,none": 0.022162634426652835,
+            "alias": "openbookqa"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7551686615886833,
+            "acc_stderr,none": 0.010032309105568777,
+            "acc_norm,none": 0.7687704026115343,
+            "acc_norm_stderr,none": 0.009837063180625327,
+            "alias": "piqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8269113149847095,
+            "acc_stderr,none": 0.006616927043886643,
+            "alias": "boolq"
+        }
+    },
+    "task_info": {
+        "model": "baichuan-inc/Baichuan2-13B-Chat",
+        "revision": "main",
+        "private": false,
+        "params": 13.0,
+        "architectures": "BaichuanForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 13.0,
+        "model_size": 26.0,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|arc:challenge|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|boolq|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716046398.8423457,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=baichuan-inc/Baichuan2-13B-Chat,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
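The "results" block keeps the MMLU aggregate ("harness|mmlu|0"), its four category rollups (humanities, other, social_sciences, stem), and the 57 individual sub-tasks in one flat namespace. A short sketch, under the same assumptions as the earlier example (the Baichuan2-13B-Chat file above parsed into `data`), that separates the rollups from the sub-tasks:

CATEGORIES = {
    "harness|mmlu_humanities|0",
    "harness|mmlu_other|0",
    "harness|mmlu_social_sciences|0",
    "harness|mmlu_stem|0",
}

rollups, subtasks = {}, {}
for task, metrics in data["results"].items():
    if not task.startswith("harness|mmlu_"):
        continue  # skips non-MMLU tasks and the "harness|mmlu|0" aggregate
    bucket = rollups if task in CATEGORIES else subtasks
    bucket[task] = metrics["acc,none"]

for name, acc in sorted(rollups.items()):
    print(f"{name}: {acc:.4f}")
print(f"{len(subtasks)} individual MMLU sub-tasks")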
baichuan-inc/results_2024-05-19-01-56-52_Baichuan2-7B-Chat.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "baichuan-inc/Baichuan2-7B-Chat",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 14.0,
+        "model_params": 7.0,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|boolq|0": {
+            "acc,none": 0.7944954128440367,
+            "acc_stderr,none": 0.007067226281166813,
+            "alias": "boolq"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.6890292028413575,
+            "acc_stderr,none": 0.013009534736286065,
+            "alias": "winogrande"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.48030646232709184,
+            "acc_stderr,none": 0.015505707499723619,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7281144781144782,
+            "acc_stderr,none": 0.009129795867310489,
+            "acc_norm,none": 0.6738215488215489,
+            "acc_norm_stderr,none": 0.00961984941703517,
+            "alias": "arc_easy"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5081897165645919,
+            "acc_stderr,none": 0.00403078396182327,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.4667375132837407,
+            "acc_stderr,none": 0.006905658217429286
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.3253968253968254,
+            "acc_stderr,none": 0.041905964388711366
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6666666666666666,
+            "acc_stderr,none": 0.03681050869161551
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.6911764705882353,
+            "acc_stderr,none": 0.03242661719827218
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7215189873417721,
+            "acc_stderr,none": 0.029178682304842548
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.6033057851239669,
+            "acc_stderr,none": 0.04465869780531009
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6296296296296297,
+            "acc_stderr,none": 0.04668408033024931
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6380368098159509,
+            "acc_stderr,none": 0.037757007291414416
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5086705202312138,
+            "acc_stderr,none": 0.026915047355369804
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2446927374301676,
+            "acc_stderr,none": 0.014378169884098417
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.5819935691318328,
+            "acc_stderr,none": 0.028013651891995076
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.5833333333333334,
+            "acc_stderr,none": 0.027431623722415012
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.39048239895697523,
+            "acc_stderr,none": 0.01246013591394507
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7251461988304093,
+            "acc_stderr,none": 0.03424042924691584
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.5857740585774058,
+            "acc_stderr,none": 0.008555060777709587
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.56,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.539622641509434,
+            "acc_stderr,none": 0.03067609659938918
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5028901734104047,
+            "acc_stderr,none": 0.03812400565974833
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.04852365870939099
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.5515695067264574,
+            "acc_stderr,none": 0.03337883736255098
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.6407766990291263,
+            "acc_stderr,none": 0.04750458399041694
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7905982905982906,
+            "acc_stderr,none": 0.026655699653922737
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.54,
+            "acc_stderr,none": 0.05009082659620332
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7279693486590039,
+            "acc_stderr,none": 0.015913367447500514
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5784313725490197,
+            "acc_stderr,none": 0.02827549015679145
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.3617021276595745,
+            "acc_stderr,none": 0.028663820147199495
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.5183823529411765,
+            "acc_stderr,none": 0.03035230339535196
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4759036144578313,
+            "acc_stderr,none": 0.03887971849597264
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.5823854403639909,
+            "acc_stderr,none": 0.008655563334160593
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2719298245614035,
+            "acc_stderr,none": 0.04185774424022056
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.6767676767676768,
+            "acc_stderr,none": 0.033322999210706444
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7202072538860104,
+            "acc_stderr,none": 0.03239637046735704
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.4717948717948718,
+            "acc_stderr,none": 0.025310639254933886
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.5168067226890757,
+            "acc_stderr,none": 0.03246013680375308
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.6990825688073394,
+            "acc_stderr,none": 0.019664751366802114
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6412213740458015,
+            "acc_stderr,none": 0.04206739313864908
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.47549019607843135,
+            "acc_stderr,none": 0.020203517280261447
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6454545454545455,
+            "acc_stderr,none": 0.04582004841505417
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.5836734693877551,
+            "acc_stderr,none": 0.03155782816556164
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.6915422885572139,
+            "acc_stderr,none": 0.03265819588512697
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.04512608598542128
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.421186171899778,
+            "acc_stderr,none": 0.008581265933096034
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.4888888888888889,
+            "acc_stderr,none": 0.04318275491977976
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.5460526315789473,
+            "acc_stderr,none": 0.04051646342874143
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.5138888888888888,
+            "acc_stderr,none": 0.04179596617581
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.39,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.51,
+            "acc_stderr,none": 0.05024183937956912
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.3431372549019608,
+            "acc_stderr,none": 0.04724007352383889
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.42127659574468085,
+            "acc_stderr,none": 0.03227834510146267
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.42758620689655175,
+            "acc_stderr,none": 0.04122737111370333
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.29365079365079366,
+            "acc_stderr,none": 0.023456037383982026
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.6096774193548387,
+            "acc_stderr,none": 0.027751256636969576
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4187192118226601,
+            "acc_stderr,none": 0.03471192860518468
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.52,
+            "acc_stderr,none": 0.05021167315686779
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.2518518518518518,
+            "acc_stderr,none": 0.026466117538959912
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.33774834437086093,
+            "acc_stderr,none": 0.03861557546255169
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.4861111111111111,
+            "acc_stderr,none": 0.03408655867977749
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.33035714285714285,
+            "acc_stderr,none": 0.04464285714285712
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.304,
+            "acc_stderr,none": 0.020591649571224932,
+            "acc_norm,none": 0.4,
+            "acc_norm_stderr,none": 0.0219308441207285,
+            "alias": "openbookqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.41552901023890787,
+            "acc_stderr,none": 0.014401366641216388,
+            "acc_norm,none": 0.4274744027303754,
+            "acc_norm_stderr,none": 0.014456862944650649,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.8826424326698503,
+            "perplexity_stderr,none": 0.10891554165574925,
+            "acc,none": 0.6755288181641762,
+            "acc_stderr,none": 0.006522620011589219,
+            "alias": "lambada_openai"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5373431587333201,
+            "acc_stderr,none": 0.00497584533508662,
+            "acc_norm,none": 0.7159928301135232,
+            "acc_norm_stderr,none": 0.004500186424443732,
+            "alias": "hellaswag"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.31211750305997554,
+            "acc_stderr,none": 0.016220756769520936,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7464635473340587,
+            "acc_stderr,none": 0.01015009083455177,
+            "acc_norm,none": 0.7377584330794341,
+            "acc_norm_stderr,none": 0.010262502565172447,
+            "alias": "piqa"
+        }
+    },
+    "task_info": {
+        "model": "baichuan-inc/Baichuan2-7B-Chat",
+        "revision": "main",
+        "private": false,
+        "params": 7.0,
+        "architectures": "BaichuanForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 7.0,
+        "model_size": 14.0,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|boolq|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|piqa|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716051791.9998326,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=baichuan-inc/Baichuan2-7B-Chat,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
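With both Baichuan2 files above loaded, the per-task gap between the 13B and 7B chat models reduces to dictionary arithmetic over the shared "results" keys. A sketch with hypothetical local file names:

import json

# Sketch: compare zero-shot accuracy between the two Baichuan2 result
# files above; the local paths below are hypothetical.
with open("Baichuan2-13B-Chat.json") as f:
    big = json.load(f)
with open("Baichuan2-7B-Chat.json") as f:
    small = json.load(f)

for task in sorted(big["results"].keys() & small["results"].keys()):
    a, b = big["results"][task], small["results"][task]
    if "acc,none" in a and "acc,none" in b:
        delta = a["acc,none"] - b["acc,none"]
        print(f"{task}: 13B={a['acc,none']:.4f} 7B={b['acc,none']:.4f} "
              f"delta={delta:+.4f}")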
bigscience/results_2024-05-19-06-01-34_bloom-7b1.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "bigscience/bloom-7b1",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 14.14,
+        "model_params": 7.07,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|mmlu|0": {
+            "acc,none": 0.26378008830650906,
+            "acc_stderr,none": 0.0037136452603742606,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.2599362380446334,
+            "acc_stderr,none": 0.006391091875104127
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.29365079365079366,
+            "acc_stderr,none": 0.04073524322147127
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.23636363636363636,
+            "acc_stderr,none": 0.033175059300091805
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.22058823529411764,
+            "acc_stderr,none": 0.029102254389674096
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.23628691983122363,
+            "acc_stderr,none": 0.02765215314415926
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.2809917355371901,
+            "acc_stderr,none": 0.04103203830514512
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.21296296296296297,
+            "acc_stderr,none": 0.039578354719809784
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.20245398773006135,
+            "acc_stderr,none": 0.031570650789119026
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.2514450867052023,
+            "acc_stderr,none": 0.023357365785874037
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.28156424581005585,
+            "acc_stderr,none": 0.015042290171866118
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.2315112540192926,
+            "acc_stderr,none": 0.023956532766639133
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.2191358024691358,
+            "acc_stderr,none": 0.023016705640262168
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.2803129074315515,
+            "acc_stderr,none": 0.011471555944958616
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.2573099415204678,
+            "acc_stderr,none": 0.03352799844161865
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.23945928548439008,
+            "acc_stderr,none": 0.007646846406782092
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.22,
+            "acc_stderr,none": 0.04163331998932269
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.2679245283018868,
+            "acc_stderr,none": 0.02725726032249485
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.2774566473988439,
+            "acc_stderr,none": 0.03414014007044036
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.19,
+            "acc_stderr,none": 0.03942772444036625
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.18385650224215247,
+            "acc_stderr,none": 0.02599837909235651
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.2912621359223301,
+            "acc_stderr,none": 0.044986763205729224
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2564102564102564,
+            "acc_stderr,none": 0.028605953702004264
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.04560480215720684
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.2222222222222222,
+            "acc_stderr,none": 0.0148668216647096
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.2875816993464052,
+            "acc_stderr,none": 0.02591780611714716
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.23049645390070922,
+            "acc_stderr,none": 0.025123739226872405
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.026303648393696036
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.1746987951807229,
+            "acc_stderr,none": 0.029560326211256854
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.28599285017874554,
+            "acc_stderr,none": 0.00814285665104558
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2631578947368421,
+            "acc_stderr,none": 0.041424397194893624
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.30808080808080807,
+            "acc_stderr,none": 0.032894773300986155
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.31088082901554404,
+            "acc_stderr,none": 0.03340361906276586
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.2794871794871795,
+            "acc_stderr,none": 0.022752388839776823
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.3025210084033613,
+            "acc_stderr,none": 0.029837962388291932
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.30275229357798167,
+            "acc_stderr,none": 0.019698711434756353
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.25190839694656486,
+            "acc_stderr,none": 0.03807387116306086
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.2630718954248366,
+            "acc_stderr,none": 0.017812676542320657
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.23636363636363636,
+            "acc_stderr,none": 0.040693063197213754
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.35918367346938773,
+            "acc_stderr,none": 0.030713560455108493
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.24378109452736318,
+            "acc_stderr,none": 0.030360490154014676
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.26,
+            "acc_stderr,none": 0.0440844002276808
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.2718046305106248,
+            "acc_stderr,none": 0.00791525041315174
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.04093601807403326
|
240 |
+
},
|
241 |
+
"harness|mmlu_anatomy|0": {
|
242 |
+
"alias": " - anatomy",
|
243 |
+
"acc,none": 0.22962962962962963,
|
244 |
+
"acc_stderr,none": 0.03633384414073465
|
245 |
+
},
|
246 |
+
"harness|mmlu_astronomy|0": {
|
247 |
+
"alias": " - astronomy",
|
248 |
+
"acc,none": 0.27631578947368424,
|
249 |
+
"acc_stderr,none": 0.03639057569952924
|
250 |
+
},
|
251 |
+
"harness|mmlu_college_biology|0": {
|
252 |
+
"alias": " - college_biology",
|
253 |
+
"acc,none": 0.22916666666666666,
|
254 |
+
"acc_stderr,none": 0.035146974678623884
|
255 |
+
},
|
256 |
+
"harness|mmlu_college_chemistry|0": {
|
257 |
+
"alias": " - college_chemistry",
|
258 |
+
"acc,none": 0.38,
|
259 |
+
"acc_stderr,none": 0.048783173121456316
|
260 |
+
},
|
261 |
+
"harness|mmlu_college_computer_science|0": {
|
262 |
+
"alias": " - college_computer_science",
|
263 |
+
"acc,none": 0.28,
|
264 |
+
"acc_stderr,none": 0.04512608598542127
|
265 |
+
},
|
266 |
+
"harness|mmlu_college_mathematics|0": {
|
267 |
+
"alias": " - college_mathematics",
|
268 |
+
"acc,none": 0.31,
|
269 |
+
"acc_stderr,none": 0.04648231987117316
|
270 |
+
},
|
271 |
+
"harness|mmlu_college_physics|0": {
|
272 |
+
"alias": " - college_physics",
|
273 |
+
"acc,none": 0.22549019607843138,
|
274 |
+
"acc_stderr,none": 0.041583075330832865
|
275 |
+
},
|
276 |
+
"harness|mmlu_computer_security|0": {
|
277 |
+
"alias": " - computer_security",
|
278 |
+
"acc,none": 0.23,
|
279 |
+
"acc_stderr,none": 0.042295258468165044
|
280 |
+
},
|
281 |
+
"harness|mmlu_conceptual_physics|0": {
|
282 |
+
"alias": " - conceptual_physics",
|
283 |
+
"acc,none": 0.33191489361702126,
|
284 |
+
"acc_stderr,none": 0.030783736757745643
|
285 |
+
},
|
286 |
+
"harness|mmlu_electrical_engineering|0": {
|
287 |
+
"alias": " - electrical_engineering",
|
288 |
+
"acc,none": 0.2206896551724138,
|
289 |
+
"acc_stderr,none": 0.03455930201924812
|
290 |
+
},
|
291 |
+
"harness|mmlu_elementary_mathematics|0": {
|
292 |
+
"alias": " - elementary_mathematics",
|
293 |
+
"acc,none": 0.2619047619047619,
|
294 |
+
"acc_stderr,none": 0.022644212615525208
|
295 |
+
},
|
296 |
+
"harness|mmlu_high_school_biology|0": {
|
297 |
+
"alias": " - high_school_biology",
|
298 |
+
"acc,none": 0.2838709677419355,
|
299 |
+
"acc_stderr,none": 0.025649381063029254
|
300 |
+
},
|
301 |
+
"harness|mmlu_high_school_chemistry|0": {
|
302 |
+
"alias": " - high_school_chemistry",
|
303 |
+
"acc,none": 0.28078817733990147,
|
304 |
+
"acc_stderr,none": 0.03161856335358609
|
305 |
+
},
|
306 |
+
"harness|mmlu_high_school_computer_science|0": {
|
307 |
+
"alias": " - high_school_computer_science",
|
308 |
+
"acc,none": 0.27,
|
309 |
+
"acc_stderr,none": 0.0446196043338474
|
310 |
+
},
|
311 |
+
"harness|mmlu_high_school_mathematics|0": {
|
312 |
+
"alias": " - high_school_mathematics",
|
313 |
+
"acc,none": 0.22592592592592592,
|
314 |
+
"acc_stderr,none": 0.025497532639609553
|
315 |
+
},
|
316 |
+
"harness|mmlu_high_school_physics|0": {
|
317 |
+
"alias": " - high_school_physics",
|
318 |
+
"acc,none": 0.31125827814569534,
|
319 |
+
"acc_stderr,none": 0.037804458505267334
|
320 |
+
},
|
321 |
+
"harness|mmlu_high_school_statistics|0": {
|
322 |
+
"alias": " - high_school_statistics",
|
323 |
+
"acc,none": 0.30092592592592593,
|
324 |
+
"acc_stderr,none": 0.031280390843298804
|
325 |
+
},
|
326 |
+
"harness|mmlu_machine_learning|0": {
|
327 |
+
"alias": " - machine_learning",
|
328 |
+
"acc,none": 0.29464285714285715,
|
329 |
+
"acc_stderr,none": 0.043270409325787296
|
330 |
+
},
|
331 |
+
"harness|truthfulqa:mc1|0": {
|
332 |
+
"acc,none": 0.22399020807833536,
|
333 |
+
"acc_stderr,none": 0.014594964329474205,
|
334 |
+
"alias": "truthfulqa_mc1"
|
335 |
+
},
|
336 |
+
"harness|arc:challenge|0": {
|
337 |
+
"acc,none": 0.302901023890785,
|
338 |
+
"acc_stderr,none": 0.013428241573185349,
|
339 |
+
"acc_norm,none": 0.33447098976109213,
|
340 |
+
"acc_norm_stderr,none": 0.013787460322441384,
|
341 |
+
"alias": "arc_challenge"
|
342 |
+
},
|
343 |
+
"harness|lambada:openai|0": {
|
344 |
+
"perplexity,none": 6.6192561666804774,
|
345 |
+
"perplexity_stderr,none": 0.17611631847798864,
|
346 |
+
"acc,none": 0.5759751601009121,
|
347 |
+
"acc_stderr,none": 0.006885089646655077,
|
348 |
+
"alias": "lambada_openai"
|
349 |
+
},
|
350 |
+
"harness|openbookqa|0": {
|
351 |
+
"acc,none": 0.25,
|
352 |
+
"acc_stderr,none": 0.019384310743640384,
|
353 |
+
"acc_norm,none": 0.36,
|
354 |
+
"acc_norm_stderr,none": 0.02148775108972052,
|
355 |
+
"alias": "openbookqa"
|
356 |
+
},
|
357 |
+
"harness|arc:easy|0": {
|
358 |
+
"acc,none": 0.6498316498316499,
|
359 |
+
"acc_stderr,none": 0.009788295410093144,
|
360 |
+
"acc_norm,none": 0.5723905723905723,
|
361 |
+
"acc_norm_stderr,none": 0.010151683397430679,
|
362 |
+
"alias": "arc_easy"
|
363 |
+
},
|
364 |
+
"harness|piqa|0": {
|
365 |
+
"acc,none": 0.7268770402611534,
|
366 |
+
"acc_stderr,none": 0.010395730264453264,
|
367 |
+
"acc_norm,none": 0.7366702937976061,
|
368 |
+
"acc_norm_stderr,none": 0.010276185322196764,
|
369 |
+
"alias": "piqa"
|
370 |
+
},
|
371 |
+
"harness|hellaswag|0": {
|
372 |
+
"acc,none": 0.4649472216689902,
|
373 |
+
"acc_stderr,none": 0.004977504446609005,
|
374 |
+
"acc_norm,none": 0.6228838876717785,
|
375 |
+
"acc_norm_stderr,none": 0.004836738514051306,
|
376 |
+
"alias": "hellaswag"
|
377 |
+
},
|
378 |
+
"harness|winogrande|0": {
|
379 |
+
"acc,none": 0.6456195737963694,
|
380 |
+
"acc_stderr,none": 0.013443314368356088,
|
381 |
+
"alias": "winogrande"
|
382 |
+
},
|
383 |
+
"harness|boolq|0": {
|
384 |
+
"acc,none": 0.6284403669724771,
|
385 |
+
"acc_stderr,none": 0.00845159814507658,
|
386 |
+
"alias": "boolq"
|
387 |
+
},
|
388 |
+
"harness|truthfulqa:mc2|0": {
|
389 |
+
"acc,none": 0.38896205089311475,
|
390 |
+
"acc_stderr,none": 0.014016564969739905,
|
391 |
+
"alias": "truthfulqa_mc2"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "bigscience/bloom-7b1",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 7.07,
|
399 |
+
"architectures": "BloomForCausalLM",
|
400 |
+
"quant_type": null,
|
401 |
+
"precision": "16bit",
|
402 |
+
"model_params": 7.07,
|
403 |
+
"model_size": 14.14,
|
404 |
+
"weight_dtype": "float16",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-27T08:04:58Z",
|
410 |
+
"model_type": "original",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": null,
|
416 |
+
"versions": {
|
417 |
+
"harness|mmlu|0": null,
|
418 |
+
"harness|mmlu_humanities|0": null,
|
419 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
420 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
421 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
422 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
423 |
+
"harness|mmlu_international_law|0": 0.0,
|
424 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
425 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
426 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
427 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
428 |
+
"harness|mmlu_philosophy|0": 0.0,
|
429 |
+
"harness|mmlu_prehistory|0": 0.0,
|
430 |
+
"harness|mmlu_professional_law|0": 0.0,
|
431 |
+
"harness|mmlu_world_religions|0": 0.0,
|
432 |
+
"harness|mmlu_other|0": null,
|
433 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
434 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
435 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
436 |
+
"harness|mmlu_global_facts|0": 0.0,
|
437 |
+
"harness|mmlu_human_aging|0": 0.0,
|
438 |
+
"harness|mmlu_management|0": 0.0,
|
439 |
+
"harness|mmlu_marketing|0": 0.0,
|
440 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
441 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
442 |
+
"harness|mmlu_nutrition|0": 0.0,
|
443 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
444 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
445 |
+
"harness|mmlu_virology|0": 0.0,
|
446 |
+
"harness|mmlu_social_sciences|0": null,
|
447 |
+
"harness|mmlu_econometrics|0": 0.0,
|
448 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
449 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
450 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
451 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
452 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
453 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
454 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
455 |
+
"harness|mmlu_public_relations|0": 0.0,
|
456 |
+
"harness|mmlu_security_studies|0": 0.0,
|
457 |
+
"harness|mmlu_sociology|0": 0.0,
|
458 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
459 |
+
"harness|mmlu_stem|0": null,
|
460 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
461 |
+
"harness|mmlu_anatomy|0": 0.0,
|
462 |
+
"harness|mmlu_astronomy|0": 0.0,
|
463 |
+
"harness|mmlu_college_biology|0": 0.0,
|
464 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
465 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
466 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
467 |
+
"harness|mmlu_college_physics|0": 0.0,
|
468 |
+
"harness|mmlu_computer_security|0": 0.0,
|
469 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
470 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
471 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
472 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
478 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
479 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
480 |
+
"harness|arc:challenge|0": 1.0,
|
481 |
+
"harness|lambada:openai|0": 1.0,
|
482 |
+
"harness|openbookqa|0": 1.0,
|
483 |
+
"harness|arc:easy|0": 1.0,
|
484 |
+
"harness|piqa|0": 1.0,
|
485 |
+
"harness|hellaswag|0": 1.0,
|
486 |
+
"harness|winogrande|0": 1.0,
|
487 |
+
"harness|boolq|0": 2.0,
|
488 |
+
"harness|truthfulqa:mc2|0": 2.0
|
489 |
+
},
|
490 |
+
"n-shot": {
|
491 |
+
"arc_challenge": 0,
|
492 |
+
"arc_easy": 0,
|
493 |
+
"boolq": 0,
|
494 |
+
"hellaswag": 0,
|
495 |
+
"lambada_openai": 0,
|
496 |
+
"mmlu": 0,
|
497 |
+
"mmlu_abstract_algebra": 0,
|
498 |
+
"mmlu_anatomy": 0,
|
499 |
+
"mmlu_astronomy": 0,
|
500 |
+
"mmlu_business_ethics": 0,
|
501 |
+
"mmlu_clinical_knowledge": 0,
|
502 |
+
"mmlu_college_biology": 0,
|
503 |
+
"mmlu_college_chemistry": 0,
|
504 |
+
"mmlu_college_computer_science": 0,
|
505 |
+
"mmlu_college_mathematics": 0,
|
506 |
+
"mmlu_college_medicine": 0,
|
507 |
+
"mmlu_college_physics": 0,
|
508 |
+
"mmlu_computer_security": 0,
|
509 |
+
"mmlu_conceptual_physics": 0,
|
510 |
+
"mmlu_econometrics": 0,
|
511 |
+
"mmlu_electrical_engineering": 0,
|
512 |
+
"mmlu_elementary_mathematics": 0,
|
513 |
+
"mmlu_formal_logic": 0,
|
514 |
+
"mmlu_global_facts": 0,
|
515 |
+
"mmlu_high_school_biology": 0,
|
516 |
+
"mmlu_high_school_chemistry": 0,
|
517 |
+
"mmlu_high_school_computer_science": 0,
|
518 |
+
"mmlu_high_school_european_history": 0,
|
519 |
+
"mmlu_high_school_geography": 0,
|
520 |
+
"mmlu_high_school_government_and_politics": 0,
|
521 |
+
"mmlu_high_school_macroeconomics": 0,
|
522 |
+
"mmlu_high_school_mathematics": 0,
|
523 |
+
"mmlu_high_school_microeconomics": 0,
|
524 |
+
"mmlu_high_school_physics": 0,
|
525 |
+
"mmlu_high_school_psychology": 0,
|
526 |
+
"mmlu_high_school_statistics": 0,
|
527 |
+
"mmlu_high_school_us_history": 0,
|
528 |
+
"mmlu_high_school_world_history": 0,
|
529 |
+
"mmlu_human_aging": 0,
|
530 |
+
"mmlu_human_sexuality": 0,
|
531 |
+
"mmlu_humanities": 0,
|
532 |
+
"mmlu_international_law": 0,
|
533 |
+
"mmlu_jurisprudence": 0,
|
534 |
+
"mmlu_logical_fallacies": 0,
|
535 |
+
"mmlu_machine_learning": 0,
|
536 |
+
"mmlu_management": 0,
|
537 |
+
"mmlu_marketing": 0,
|
538 |
+
"mmlu_medical_genetics": 0,
|
539 |
+
"mmlu_miscellaneous": 0,
|
540 |
+
"mmlu_moral_disputes": 0,
|
541 |
+
"mmlu_moral_scenarios": 0,
|
542 |
+
"mmlu_nutrition": 0,
|
543 |
+
"mmlu_other": 0,
|
544 |
+
"mmlu_philosophy": 0,
|
545 |
+
"mmlu_prehistory": 0,
|
546 |
+
"mmlu_professional_accounting": 0,
|
547 |
+
"mmlu_professional_law": 0,
|
548 |
+
"mmlu_professional_medicine": 0,
|
549 |
+
"mmlu_professional_psychology": 0,
|
550 |
+
"mmlu_public_relations": 0,
|
551 |
+
"mmlu_security_studies": 0,
|
552 |
+
"mmlu_social_sciences": 0,
|
553 |
+
"mmlu_sociology": 0,
|
554 |
+
"mmlu_stem": 0,
|
555 |
+
"mmlu_us_foreign_policy": 0,
|
556 |
+
"mmlu_virology": 0,
|
557 |
+
"mmlu_world_religions": 0,
|
558 |
+
"openbookqa": 0,
|
559 |
+
"piqa": 0,
|
560 |
+
"truthfulqa_mc1": 0,
|
561 |
+
"truthfulqa_mc2": 0,
|
562 |
+
"winogrande": 0
|
563 |
+
},
|
564 |
+
"date": 1716066787.6823578,
|
565 |
+
"config": {
|
566 |
+
"model": "hf",
|
567 |
+
"model_args": "pretrained=bigscience/bloom-7b1,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
568 |
+
"batch_size": 4,
|
569 |
+
"batch_sizes": [],
|
570 |
+
"device": "cuda",
|
571 |
+
"use_cache": null,
|
572 |
+
"limit": null,
|
573 |
+
"bootstrap_iters": 100000,
|
574 |
+
"gen_kwargs": null
|
575 |
+
}
|
576 |
+
}
|
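Each results file in this sync shares the same top-level layout: a "config_general" header, a "results" map keyed by "harness|<task>|<n-shot>" entries, then "task_info", "quantization_config", "versions", "n-shot", "date", and the run "config". A minimal sketch of reading one of these files with the Python standard library; the local path "results.json" is a placeholder, not a file in this repo:

import json

# Load one synced results file (path is illustrative, not part of this commit).
with open("results.json") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])

# Print the zero-shot accuracy recorded for each harness task, where present.
for task, metrics in data["results"].items():
    acc = metrics.get("acc,none")
    if acc is not None:
        print(f"{task}: {acc:.4f}")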
facebook/results_2024-05-19-13-35-55_opt-13b.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "facebook/opt-13b",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 26,
+        "model_params": 13,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.6479873717442778,
+            "acc_stderr,none": 0.013422874824929711,
+            "alias": "winogrande"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7589771490750816,
+            "acc_stderr,none": 0.009979042717267312,
+            "acc_norm,none": 0.7687704026115343,
+            "acc_norm_stderr,none": 0.009837063180625326,
+            "alias": "piqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.3293515358361775,
+            "acc_stderr,none": 0.013734057652635476,
+            "acc_norm,none": 0.35665529010238906,
+            "acc_norm_stderr,none": 0.013998056902620199,
+            "alias": "arc_challenge"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.6584097859327217,
+            "acc_stderr,none": 0.008294560677768492,
+            "alias": "boolq"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5246962756423024,
+            "acc_stderr,none": 0.004983691099110906,
+            "acc_norm,none": 0.6984664409480184,
+            "acc_norm_stderr,none": 0.004579859084500809,
+            "alias": "hellaswag"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.268,
+            "acc_stderr,none": 0.019827714859587564,
+            "acc_norm,none": 0.39,
+            "acc_norm_stderr,none": 0.021834685869369215,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.19706242350061198,
+            "acc_stderr,none": 0.013925080734473747,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.2472582253240279,
+            "acc_stderr,none": 0.0036415810372731265,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.24357066950053136,
+            "acc_stderr,none": 0.006260549466082156
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.24603174603174602,
+            "acc_stderr,none": 0.03852273364924316
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.22424242424242424,
+            "acc_stderr,none": 0.032568666616811015
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.2647058823529412,
+            "acc_stderr,none": 0.03096451792692339
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.2109704641350211,
+            "acc_stderr,none": 0.02655837250266192
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.21487603305785125,
+            "acc_stderr,none": 0.03749492448709699
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.24074074074074073,
+            "acc_stderr,none": 0.041331194402438376
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.22085889570552147,
+            "acc_stderr,none": 0.032591773927421776
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.29190751445086704,
+            "acc_stderr,none": 0.02447699407624731
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23798882681564246,
+            "acc_stderr,none": 0.014242630070574885
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.2282958199356913,
+            "acc_stderr,none": 0.023839303311398195
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.25617283950617287,
+            "acc_stderr,none": 0.0242885336377261
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.24315514993481094,
+            "acc_stderr,none": 0.010956556654417355
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.2631578947368421,
+            "acc_stderr,none": 0.033773102522091945
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.2481493401995494,
+            "acc_stderr,none": 0.007747342709768045
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695235
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.22641509433962265,
+            "acc_stderr,none": 0.02575755989310675
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.2254335260115607,
+            "acc_stderr,none": 0.03186209851641143
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.20179372197309417,
+            "acc_stderr,none": 0.026936111912802273
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.22330097087378642,
+            "acc_stderr,none": 0.04123553189891431
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2564102564102564,
+            "acc_stderr,none": 0.028605953702004253
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.26309067688378035,
+            "acc_stderr,none": 0.015745497169049046
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.25163398692810457,
+            "acc_stderr,none": 0.024848018263875202
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.2553191489361702,
+            "acc_stderr,none": 0.026011992930902006
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.21691176470588236,
+            "acc_stderr,none": 0.025035845227711274
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.30120481927710846,
+            "acc_stderr,none": 0.03571609230053481
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.25901852453688656,
+            "acc_stderr,none": 0.007899548417350766
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.19298245614035087,
+            "acc_stderr,none": 0.037124548537213684
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.2474747474747475,
+            "acc_stderr,none": 0.030746300742124498
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.23834196891191708,
+            "acc_stderr,none": 0.030748905363909892
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.2794871794871795,
+            "acc_stderr,none": 0.02275238883977683
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.23109243697478993,
+            "acc_stderr,none": 0.027381406927868966
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.24587155963302754,
+            "acc_stderr,none": 0.018461940968708443
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.2748091603053435,
+            "acc_stderr,none": 0.03915345408847834
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.2549019607843137,
+            "acc_stderr,none": 0.017630827375148383
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.2727272727272727,
+            "acc_stderr,none": 0.04265792110940589
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.2693877551020408,
+            "acc_stderr,none": 0.02840125202902294
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.3034825870646766,
+            "acc_stderr,none": 0.03251006816458617
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.2404059625753251,
+            "acc_stderr,none": 0.0076178068440744895
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.24,
+            "acc_stderr,none": 0.042923469599092816
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.24444444444444444,
+            "acc_stderr,none": 0.03712537833614867
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.26973684210526316,
+            "acc_stderr,none": 0.03611780560284898
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2847222222222222,
+            "acc_stderr,none": 0.037738099906869334
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.2,
+            "acc_stderr,none": 0.040201512610368466
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.19,
+            "acc_stderr,none": 0.03942772444036622
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.22,
+            "acc_stderr,none": 0.041633319989322695
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.19607843137254902,
+            "acc_stderr,none": 0.039505818611799616
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.045126085985421276
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.24680851063829787,
+            "acc_stderr,none": 0.028185441301234095
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.22758620689655173,
+            "acc_stderr,none": 0.03493950380131183
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.21957671957671956,
+            "acc_stderr,none": 0.02132001859977036
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.2645161290322581,
+            "acc_stderr,none": 0.025091892378859275
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.22167487684729065,
+            "acc_stderr,none": 0.029225575892489607
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.22592592592592592,
+            "acc_stderr,none": 0.025497532639609556
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.26490066225165565,
+            "acc_stderr,none": 0.03603038545360384
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.2222222222222222,
+            "acc_stderr,none": 0.02835321286686344
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.26785714285714285,
+            "acc_stderr,none": 0.04203277291467762
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 4.038197085285582,
+            "perplexity_stderr,none": 0.08692727713669275,
+            "acc,none": 0.685814088880264,
+            "acc_stderr,none": 0.006467085866653894,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.3402519286265322,
+            "acc_stderr,none": 0.013334973433373252,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.6712962962962963,
+            "acc_stderr,none": 0.00963890316702218,
+            "acc_norm,none": 0.6186868686868687,
+            "acc_norm_stderr,none": 0.009966542497171016,
+            "alias": "arc_easy"
+        }
+    },
+    "task_info": {
+        "model": "facebook/opt-13b",
+        "revision": "main",
+        "private": false,
+        "params": 13,
+        "architectures": "OPTForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 13,
+        "model_size": 26,
+        "weight_dtype": "float16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|winogrande|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716092426.3379767,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=facebook/opt-13b,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
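With two such files on disk, the shared "harness|...|0" keys make model-to-model comparison a simple dictionary intersection. A sketch under the assumption that local copies of the bloom-7b1 and opt-13b results above are saved as "bloom-7b1.json" and "opt-13b.json" (placeholder names, not paths in this repo):

import json

# Hypothetical local copies of the two result files shown above.
paths = {"bloom-7b1": "bloom-7b1.json", "opt-13b": "opt-13b.json"}
results = {}
for name, path in paths.items():
    with open(path) as f:
        results[name] = json.load(f)["results"]

# Accuracy gap on every task both runs report.
for task in sorted(set(results["bloom-7b1"]) & set(results["opt-13b"])):
    a = results["bloom-7b1"][task].get("acc,none")
    b = results["opt-13b"][task].get("acc,none")
    if a is not None and b is not None:
        print(f"{task}: opt-13b minus bloom-7b1 = {b - a:+.4f}")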
facebook/results_2024-05-19-14-00-10_opt-1.3b.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "facebook/opt-1.3b",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 2.6,
+        "model_params": 1.3,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|mmlu|0": {
+            "acc,none": 0.2502492522432702,
+            "acc_stderr,none": 0.0036532855810554217,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.2541976620616366,
+            "acc_stderr,none": 0.006342538719017683
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.2222222222222222,
+            "acc_stderr,none": 0.037184890068181146
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.2545454545454545,
+            "acc_stderr,none": 0.0340150671524904
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.2549019607843137,
+            "acc_stderr,none": 0.030587591351604236
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.22784810126582278,
+            "acc_stderr,none": 0.02730348459906943
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.34710743801652894,
+            "acc_stderr,none": 0.04345724570292534
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.3148148148148148,
+            "acc_stderr,none": 0.04489931073591312
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.26993865030674846,
+            "acc_stderr,none": 0.034878251684978906
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.2658959537572254,
+            "acc_stderr,none": 0.023786203255508283
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2424581005586592,
+            "acc_stderr,none": 0.014333522059217887
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.24758842443729903,
+            "acc_stderr,none": 0.024513879973621967
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.32407407407407407,
+            "acc_stderr,none": 0.026041766202717163
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.23859191655801826,
+            "acc_stderr,none": 0.010885929742002221
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.25146198830409355,
+            "acc_stderr,none": 0.033275044238468436
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.2452526552944963,
+            "acc_stderr,none": 0.007711645493907811
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.04512608598542129
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.2188679245283019,
+            "acc_stderr,none": 0.025447863825108614
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.23699421965317918,
+            "acc_stderr,none": 0.03242414757483098
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.04560480215720683
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.2914798206278027,
+            "acc_stderr,none": 0.030500283176545913
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.1941747572815534,
+            "acc_stderr,none": 0.03916667762822584
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2863247863247863,
+            "acc_stderr,none": 0.029614323690456655
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.22,
+            "acc_stderr,none": 0.041633319989322695
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.24648786717752236,
+            "acc_stderr,none": 0.015411308769686929
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.24183006535947713,
+            "acc_stderr,none": 0.024518195641879334
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.2765957446808511,
+            "acc_stderr,none": 0.026684564340460976
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.17279411764705882,
+            "acc_stderr,none": 0.022966067585581795
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.24096385542168675,
+            "acc_stderr,none": 0.0332939411907353
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.2398440038999025,
+            "acc_stderr,none": 0.007695061676795164
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2631578947368421,
+            "acc_stderr,none": 0.041424397194893624
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.23737373737373738,
+            "acc_stderr,none": 0.0303137105381989
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.24352331606217617,
+            "acc_stderr,none": 0.030975436386845447
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.2230769230769231,
+            "acc_stderr,none": 0.021107730127244
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.20588235294117646,
+            "acc_stderr,none": 0.02626502460827589
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.25137614678899084,
+            "acc_stderr,none": 0.018599206360287415
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.1984732824427481,
+            "acc_stderr,none": 0.034981493854624714
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.28431372549019607,
+            "acc_stderr,none": 0.018249024411207664
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.18181818181818182,
+            "acc_stderr,none": 0.036942843353378
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.21224489795918366,
+            "acc_stderr,none": 0.026176967197866767
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.22885572139303484,
+            "acc_stderr,none": 0.029705284056772436
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.23,
+            "acc_stderr,none": 0.042295258468165065
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.2594354582936885,
+            "acc_stderr,none": 0.007810005624104469
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.0446196043338474
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.3037037037037037,
+            "acc_stderr,none": 0.03972552884785137
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.2631578947368421,
+            "acc_stderr,none": 0.03583496176361062
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2638888888888889,
+            "acc_stderr,none": 0.03685651095897532
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.19,
+            "acc_stderr,none": 0.039427724440366234
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.20588235294117646,
+            "acc_stderr,none": 0.04023382273617747
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.04560480215720684
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.2127659574468085,
+            "acc_stderr,none": 0.026754391348039787
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.25517241379310346,
+            "acc_stderr,none": 0.03632984052707842
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.25396825396825395,
+            "acc_stderr,none": 0.02241804289111394
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.267741935483871,
+            "acc_stderr,none": 0.025189006660212374
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.3054187192118227,
+            "acc_stderr,none": 0.032406615658684086
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.04725815626252605
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.26666666666666666,
+            "acc_stderr,none": 0.026962424325073838
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.26490066225165565,
+            "acc_stderr,none": 0.03603038545360383
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.24074074074074073,
+            "acc_stderr,none": 0.0291575221846056
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.2857142857142857,
+            "acc_stderr,none": 0.042878587513404544
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 6.644858840033134,
+            "perplexity_stderr,none": 0.17177969436256135,
+            "acc,none": 0.5788860857752766,
+            "acc_stderr,none": 0.006878732547908384,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.232,
+            "acc_stderr,none": 0.018896193591952028,
+            "acc_norm,none": 0.33,
+            "acc_norm_stderr,none": 0.021049612166134817,
+            "alias": "openbookqa"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.5723905723905723,
+            "acc_stderr,none": 0.01015168339743068,
+            "acc_norm,none": 0.51010101010101,
+            "acc_norm_stderr,none": 0.01025768968745837,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.38656764958132345,
+            "acc_stderr,none": 0.014216966165226998,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.23623011015911874,
+            "acc_stderr,none": 0.014869755015871093,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.41525592511451903,
+            "acc_stderr,none": 0.004917590378138208,
+            "acc_norm,none": 0.5371439952200757,
+            "acc_norm_stderr,none": 0.004975993795562024,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.5927387529597474,
+            "acc_stderr,none": 0.013808654122417835,
+            "alias": "winogrande"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.23464163822525597,
+            "acc_stderr,none": 0.012383873560768685,
+            "acc_norm,none": 0.2960750853242321,
+            "acc_norm_stderr,none": 0.013340916085246263,
+            "alias": "arc_challenge"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.5764525993883792,
+            "acc_stderr,none": 0.008642220663071522,
+            "alias": "boolq"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7165397170837867,
+            "acc_stderr,none": 0.010515057791152065,
+            "acc_norm,none": 0.7241566920565833,
+            "acc_norm_stderr,none": 0.010427805502729115,
+            "alias": "piqa"
+        }
+    },
+    "task_info": {
+        "model": "facebook/opt-1.3b",
+        "revision": "main",
+        "private": false,
+        "params": 1.3,
+        "architectures": "OPTForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 1.3,
+        "model_size": 2.6,
+        "weight_dtype": "float16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
|
442 |
+
"harness|mmlu_nutrition|0": 0.0,
|
443 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
444 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
445 |
+
"harness|mmlu_virology|0": 0.0,
|
446 |
+
"harness|mmlu_social_sciences|0": null,
|
447 |
+
"harness|mmlu_econometrics|0": 0.0,
|
448 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
449 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
450 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
451 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
452 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
453 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
454 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
455 |
+
"harness|mmlu_public_relations|0": 0.0,
|
456 |
+
"harness|mmlu_security_studies|0": 0.0,
|
457 |
+
"harness|mmlu_sociology|0": 0.0,
|
458 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
459 |
+
"harness|mmlu_stem|0": null,
|
460 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
461 |
+
"harness|mmlu_anatomy|0": 0.0,
|
462 |
+
"harness|mmlu_astronomy|0": 0.0,
|
463 |
+
"harness|mmlu_college_biology|0": 0.0,
|
464 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
465 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
466 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
467 |
+
"harness|mmlu_college_physics|0": 0.0,
|
468 |
+
"harness|mmlu_computer_security|0": 0.0,
|
469 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
470 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
471 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
472 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
478 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
479 |
+
"harness|lambada:openai|0": 1.0,
|
480 |
+
"harness|openbookqa|0": 1.0,
|
481 |
+
"harness|arc:easy|0": 1.0,
|
482 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
483 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
484 |
+
"harness|hellaswag|0": 1.0,
|
485 |
+
"harness|winogrande|0": 1.0,
|
486 |
+
"harness|arc:challenge|0": 1.0,
|
487 |
+
"harness|boolq|0": 2.0,
|
488 |
+
"harness|piqa|0": 1.0
|
489 |
+
},
|
490 |
+
"n-shot": {
|
491 |
+
"arc_challenge": 0,
|
492 |
+
"arc_easy": 0,
|
493 |
+
"boolq": 0,
|
494 |
+
"hellaswag": 0,
|
495 |
+
"lambada_openai": 0,
|
496 |
+
"mmlu": 0,
|
497 |
+
"mmlu_abstract_algebra": 0,
|
498 |
+
"mmlu_anatomy": 0,
|
499 |
+
"mmlu_astronomy": 0,
|
500 |
+
"mmlu_business_ethics": 0,
|
501 |
+
"mmlu_clinical_knowledge": 0,
|
502 |
+
"mmlu_college_biology": 0,
|
503 |
+
"mmlu_college_chemistry": 0,
|
504 |
+
"mmlu_college_computer_science": 0,
|
505 |
+
"mmlu_college_mathematics": 0,
|
506 |
+
"mmlu_college_medicine": 0,
|
507 |
+
"mmlu_college_physics": 0,
|
508 |
+
"mmlu_computer_security": 0,
|
509 |
+
"mmlu_conceptual_physics": 0,
|
510 |
+
"mmlu_econometrics": 0,
|
511 |
+
"mmlu_electrical_engineering": 0,
|
512 |
+
"mmlu_elementary_mathematics": 0,
|
513 |
+
"mmlu_formal_logic": 0,
|
514 |
+
"mmlu_global_facts": 0,
|
515 |
+
"mmlu_high_school_biology": 0,
|
516 |
+
"mmlu_high_school_chemistry": 0,
|
517 |
+
"mmlu_high_school_computer_science": 0,
|
518 |
+
"mmlu_high_school_european_history": 0,
|
519 |
+
"mmlu_high_school_geography": 0,
|
520 |
+
"mmlu_high_school_government_and_politics": 0,
|
521 |
+
"mmlu_high_school_macroeconomics": 0,
|
522 |
+
"mmlu_high_school_mathematics": 0,
|
523 |
+
"mmlu_high_school_microeconomics": 0,
|
524 |
+
"mmlu_high_school_physics": 0,
|
525 |
+
"mmlu_high_school_psychology": 0,
|
526 |
+
"mmlu_high_school_statistics": 0,
|
527 |
+
"mmlu_high_school_us_history": 0,
|
528 |
+
"mmlu_high_school_world_history": 0,
|
529 |
+
"mmlu_human_aging": 0,
|
530 |
+
"mmlu_human_sexuality": 0,
|
531 |
+
"mmlu_humanities": 0,
|
532 |
+
"mmlu_international_law": 0,
|
533 |
+
"mmlu_jurisprudence": 0,
|
534 |
+
"mmlu_logical_fallacies": 0,
|
535 |
+
"mmlu_machine_learning": 0,
|
536 |
+
"mmlu_management": 0,
|
537 |
+
"mmlu_marketing": 0,
|
538 |
+
"mmlu_medical_genetics": 0,
|
539 |
+
"mmlu_miscellaneous": 0,
|
540 |
+
"mmlu_moral_disputes": 0,
|
541 |
+
"mmlu_moral_scenarios": 0,
|
542 |
+
"mmlu_nutrition": 0,
|
543 |
+
"mmlu_other": 0,
|
544 |
+
"mmlu_philosophy": 0,
|
545 |
+
"mmlu_prehistory": 0,
|
546 |
+
"mmlu_professional_accounting": 0,
|
547 |
+
"mmlu_professional_law": 0,
|
548 |
+
"mmlu_professional_medicine": 0,
|
549 |
+
"mmlu_professional_psychology": 0,
|
550 |
+
"mmlu_public_relations": 0,
|
551 |
+
"mmlu_security_studies": 0,
|
552 |
+
"mmlu_social_sciences": 0,
|
553 |
+
"mmlu_sociology": 0,
|
554 |
+
"mmlu_stem": 0,
|
555 |
+
"mmlu_us_foreign_policy": 0,
|
556 |
+
"mmlu_virology": 0,
|
557 |
+
"mmlu_world_religions": 0,
|
558 |
+
"openbookqa": 0,
|
559 |
+
"piqa": 0,
|
560 |
+
"truthfulqa_mc1": 0,
|
561 |
+
"truthfulqa_mc2": 0,
|
562 |
+
"winogrande": 0
|
563 |
+
},
|
564 |
+
"date": 1716096976.8643775,
|
565 |
+
"config": {
|
566 |
+
"model": "hf",
|
567 |
+
"model_args": "pretrained=facebook/opt-1.3b,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
568 |
+
"batch_size": 4,
|
569 |
+
"batch_sizes": [],
|
570 |
+
"device": "cuda",
|
571 |
+
"use_cache": null,
|
572 |
+
"limit": null,
|
573 |
+
"bootstrap_iters": 100000,
|
574 |
+
"gen_kwargs": null
|
575 |
+
}
|
576 |
+
}
|
google/results_2024-05-18-14-39-54_gemma-7b.json
ADDED
@@ -0,0 +1,576 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": "-1",
    "start_time": "",
    "end_time": "",
    "total_evaluation_time_secondes": "",
    "model_name": "google/gemma-7b",
    "model_sha": "",
    "model_dtype": "16bit",
    "model_size": 17.08,
    "model_params": 8.54,
    "quant_type": null,
    "precision": "16bit"
  },
  "results": {
    "harness|mmlu|0": {
      "acc,none": 0.6199259364762855,
      "acc_stderr,none": 0.003832381421427788,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5543039319872476,
      "acc_stderr,none": 0.006643997514441502
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.46825396825396826,
      "acc_stderr,none": 0.04463112720677173
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7515151515151515,
      "acc_stderr,none": 0.033744026441394036
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.7647058823529411,
      "acc_stderr,none": 0.029771775228145635
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.810126582278481,
      "acc_stderr,none": 0.025530100460233504
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.7933884297520661,
      "acc_stderr,none": 0.03695980128098824
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7407407407407407,
      "acc_stderr,none": 0.04236511258094632
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7423312883435583,
      "acc_stderr,none": 0.03436150827846917
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.6965317919075145,
      "acc_stderr,none": 0.024752411960917205
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2424581005586592,
      "acc_stderr,none": 0.014333522059217887
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.7234726688102894,
      "acc_stderr,none": 0.025403832978179615
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7345679012345679,
      "acc_stderr,none": 0.02456922360046085
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4661016949152542,
      "acc_stderr,none": 0.012740853872949834
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.8421052631578947,
      "acc_stderr,none": 0.027966785859160872
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.6997103315094947,
      "acc_stderr,none": 0.007925287337497432
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.62,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.6981132075471698,
      "acc_stderr,none": 0.028254200344438665
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6069364161849711,
      "acc_stderr,none": 0.0372424959581773
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.38,
      "acc_stderr,none": 0.04878317312145632
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.695067264573991,
      "acc_stderr,none": 0.030898610882477515
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8737864077669902,
      "acc_stderr,none": 0.03288180278808628
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8803418803418803,
      "acc_stderr,none": 0.02126271940040697
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.71,
      "acc_stderr,none": 0.045604802157206845
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.8148148148148148,
      "acc_stderr,none": 0.013890862162876166
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.6928104575163399,
      "acc_stderr,none": 0.026415601914389002
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.5212765957446809,
      "acc_stderr,none": 0.029800481645628693
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.6654411764705882,
      "acc_stderr,none": 0.02866199620233531
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.5060240963855421,
      "acc_stderr,none": 0.03892212195333045
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7185570360740982,
      "acc_stderr,none": 0.007944110221899323
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.45614035087719296,
      "acc_stderr,none": 0.046854730419077895
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.803030303030303,
      "acc_stderr,none": 0.028335609732463355
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8393782383419689,
      "acc_stderr,none": 0.026499057701397453
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.6307692307692307,
      "acc_stderr,none": 0.024468615241478912
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.6596638655462185,
      "acc_stderr,none": 0.030778057422931673
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8110091743119267,
      "acc_stderr,none": 0.016785481159203627
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.7557251908396947,
      "acc_stderr,none": 0.03768335959728745
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.6584967320261438,
      "acc_stderr,none": 0.019184639328092487
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.6181818181818182,
      "acc_stderr,none": 0.04653429807913508
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7346938775510204,
      "acc_stderr,none": 0.02826388994378462
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.7761194029850746,
      "acc_stderr,none": 0.029475250236017193
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.87,
      "acc_stderr,none": 0.033799766898963086
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.5429749444973041,
      "acc_stderr,none": 0.00848031767805221
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.25,
      "acc_stderr,none": 0.04351941398892446
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.5777777777777777,
      "acc_stderr,none": 0.04266763404099582
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.756578947368421,
      "acc_stderr,none": 0.034923496688842384
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.7916666666666666,
      "acc_stderr,none": 0.033961162058453336
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.49,
      "acc_stderr,none": 0.05024183937956913
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.55,
      "acc_stderr,none": 0.04999999999999999
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.34,
      "acc_stderr,none": 0.04760952285695236
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.43137254901960786,
      "acc_stderr,none": 0.04928099597287534
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5872340425531914,
      "acc_stderr,none": 0.03218471141400351
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.6,
      "acc_stderr,none": 0.040824829046386284
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.4497354497354497,
      "acc_stderr,none": 0.025620857042936648
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7741935483870968,
      "acc_stderr,none": 0.023785577884181012
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5467980295566502,
      "acc_stderr,none": 0.035025446508458714
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.63,
      "acc_stderr,none": 0.04852365870939099
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.32222222222222224,
      "acc_stderr,none": 0.028493465091028597
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.3841059602649007,
      "acc_stderr,none": 0.03971301814719198
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5694444444444444,
      "acc_stderr,none": 0.03376922151252336
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.4642857142857143,
      "acc_stderr,none": 0.04733667890053755
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.31211750305997554,
      "acc_stderr,none": 0.016220756769520946,
      "alias": "truthfulqa_mc1"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.322,
      "acc_stderr,none": 0.020916668330019882,
      "acc_norm,none": 0.44,
      "acc_norm_stderr,none": 0.02222133153414306,
      "alias": "openbookqa"
    },
    "harness|piqa|0": {
      "acc,none": 0.8014145810663765,
      "acc_stderr,none": 0.009307814521717864,
      "acc_norm,none": 0.8215451577801959,
      "acc_norm_stderr,none": 0.008933575463062074,
      "alias": "piqa"
    },
    "harness|winogrande|0": {
      "acc,none": 0.7505919494869772,
      "acc_stderr,none": 0.012160189196930685,
      "alias": "winogrande"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.8253367003367004,
      "acc_stderr,none": 0.007790845678413371,
      "acc_norm,none": 0.8118686868686869,
      "acc_norm_stderr,none": 0.008019395492398136,
      "alias": "arc_easy"
    },
    "harness|boolq|0": {
      "acc,none": 0.8339449541284404,
      "acc_stderr,none": 0.006508595338469726,
      "alias": "boolq"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.6058554072893846,
      "acc_stderr,none": 0.004876674814874686,
      "acc_norm,none": 0.808603863772157,
      "acc_norm_stderr,none": 0.003925961222839846,
      "alias": "hellaswag"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.449533098008656,
      "acc_stderr,none": 0.01466914497705992,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.4991467576791809,
      "acc_stderr,none": 0.014611369529813283,
      "acc_norm,none": 0.537542662116041,
      "acc_norm_stderr,none": 0.014570144495075581,
      "alias": "arc_challenge"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 3.3526447901683145,
      "perplexity_stderr,none": 0.06471519419298942,
      "acc,none": 0.7316126528235979,
      "acc_stderr,none": 0.006173531884910562,
      "alias": "lambada_openai"
    }
  },
  "task_info": {
    "model": "google/gemma-7b",
    "revision": "main",
    "private": false,
    "params": 8.54,
    "architectures": "GemmaForCausalLM",
    "quant_type": null,
    "precision": "16bit",
    "model_params": 8.54,
    "model_size": 17.08,
    "weight_dtype": "bfloat16",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-04-27T08:04:58Z",
    "model_type": "original",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": null,
  "versions": {
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|openbookqa|0": 1.0,
    "harness|piqa|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|hellaswag|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|arc:challenge|0": 1.0,
    "harness|lambada:openai|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1716012437.9040594,
  "config": {
    "model": "hf",
    "model_args": "pretrained=gemma-7b,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 4,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
google/results_2024-05-18-15-11-33_gemma-7b-it.json
ADDED
@@ -0,0 +1,576 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": "-1",
    "start_time": "",
    "end_time": "",
    "total_evaluation_time_secondes": "",
    "model_name": "google/gemma-7b-it",
    "model_sha": "",
    "model_dtype": "16bit",
    "model_size": 17.08,
    "model_params": 8.54,
    "quant_type": null,
    "precision": "16bit"
  },
  "results": {
    "harness|arc:easy|0": {
      "acc,none": 0.2537878787878788,
      "acc_stderr,none": 0.008929657065808297,
      "acc_norm,none": 0.25673400673400676,
      "acc_norm_stderr,none": 0.008963590834042407,
      "alias": "arc_easy"
    },
    "harness|piqa|0": {
      "acc,none": 0.5413492927094669,
      "acc_stderr,none": 0.011625864113315815,
      "acc_norm,none": 0.5136017410228509,
      "acc_norm_stderr,none": 0.011661506839823772,
      "alias": "piqa"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 14767821.727399996,
      "perplexity_stderr,none": 2409090.1741631296,
      "acc,none": 0.022123035125169804,
      "acc_stderr,none": 0.0020491631397655933,
      "alias": "lambada_openai"
    },
    "harness|mmlu|0": {
      "acc,none": 0.2558752314485116,
      "acc_stderr,none": 0.0036824517724478216,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.251009564293305,
      "acc_stderr,none": 0.006322493233824801
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.2698412698412698,
      "acc_stderr,none": 0.039701582732351734
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.22424242424242424,
      "acc_stderr,none": 0.032568666616811015
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.2549019607843137,
      "acc_stderr,none": 0.03058759135160426
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.2320675105485232,
      "acc_stderr,none": 0.027479744550808524
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.21487603305785125,
      "acc_stderr,none": 0.037494924487096966
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.3333333333333333,
      "acc_stderr,none": 0.04557239513497752
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.2331288343558282,
      "acc_stderr,none": 0.0332201579577674
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.24566473988439305,
      "acc_stderr,none": 0.02317629820399201
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.25027932960893856,
      "acc_stderr,none": 0.014487500852850417
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.24758842443729903,
      "acc_stderr,none": 0.024513879973621967
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.2654320987654321,
      "acc_stderr,none": 0.024569223600460845
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.26010430247718386,
      "acc_stderr,none": 0.011204382887823822
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.1871345029239766,
      "acc_stderr,none": 0.02991312723236806
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.24750563244287094,
      "acc_stderr,none": 0.007747039809319903
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.24,
      "acc_stderr,none": 0.04292346959909284
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.24528301886792453,
      "acc_stderr,none": 0.026480357179895685
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.2658959537572254,
      "acc_stderr,none": 0.03368762932259431
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.18,
      "acc_stderr,none": 0.038612291966536955
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.21524663677130046,
      "acc_stderr,none": 0.027584066602208256
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.2621359223300971,
      "acc_stderr,none": 0.043546310772605956
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.23076923076923078,
      "acc_stderr,none": 0.02760192138141758
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.27,
      "acc_stderr,none": 0.044619604333847394
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.2503192848020434,
      "acc_stderr,none": 0.01549108895149459
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.23529411764705882,
      "acc_stderr,none": 0.02428861946604611
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.25886524822695034,
      "acc_stderr,none": 0.02612957252718085
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.2977941176470588,
      "acc_stderr,none": 0.027778298701545447
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.2289156626506024,
      "acc_stderr,none": 0.03270745277352477
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.2684432889177771,
      "acc_stderr,none": 0.007992200599947303
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.3157894736842105,
      "acc_stderr,none": 0.04372748290278007
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.2676767676767677,
      "acc_stderr,none": 0.031544498882702866
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.24352331606217617,
      "acc_stderr,none": 0.030975436386845426
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.28717948717948716,
      "acc_stderr,none": 0.022939925418530616
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.2773109243697479,
      "acc_stderr,none": 0.02907937453948001
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.26605504587155965,
      "acc_stderr,none": 0.018946022322225583
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.21374045801526717,
      "acc_stderr,none": 0.035954616117746904
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.2565359477124183,
      "acc_stderr,none": 0.01766784161237899
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.3090909090909091,
      "acc_stderr,none": 0.044262946482000985
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.30612244897959184,
      "acc_stderr,none": 0.02950489645459596
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.22885572139303484,
      "acc_stderr,none": 0.029705284056772432
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.27,
      "acc_stderr,none": 0.044619604333847394
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.2591183000317158,
      "acc_stderr,none": 0.007799662714513278
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.2,
      "acc_stderr,none": 0.04020151261036845
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.2518518518518518,
      "acc_stderr,none": 0.03749850709174022
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.29605263157894735,
      "acc_stderr,none": 0.03715062154998905
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.2361111111111111,
      "acc_stderr,none": 0.03551446610810826
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.21,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.22,
      "acc_stderr,none": 0.041633319989322695
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.21,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.37254901960784315,
      "acc_stderr,none": 0.04810840148082634
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.2,
      "acc_stderr,none": 0.04020151261036846
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.23404255319148937,
      "acc_stderr,none": 0.027678452578212387
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.25517241379310346,
      "acc_stderr,none": 0.03632984052707842
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.24338624338624337,
      "acc_stderr,none": 0.022101128787415426
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.27419354838709675,
      "acc_stderr,none": 0.025378139970885203
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.270935960591133,
      "acc_stderr,none": 0.031270907132976984
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.22,
      "acc_stderr,none": 0.041633319989322695
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.2740740740740741,
      "acc_stderr,none": 0.027195934804085626
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.2980132450331126,
      "acc_stderr,none": 0.037345356767871984
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.2962962962962963,
      "acc_stderr,none": 0.031141447823536037
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.29464285714285715,
      "acc_stderr,none": 0.0432704093257873
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.47424027194431734,
      "acc_stderr,none": 0.017234475672732584,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.21160409556313994,
      "acc_stderr,none": 0.011935916358632859,
      "acc_norm,none": 0.2525597269624573,
      "acc_norm_stderr,none": 0.012696728980207706,
      "alias": "arc_challenge"
    },
    "harness|winogrande|0": {
      "acc,none": 0.47750591949486976,
      "acc_stderr,none": 0.014038257824059886,
      "alias": "winogrande"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.178,
      "acc_stderr,none": 0.017123622189062257,
      "acc_norm,none": 0.274,
      "acc_norm_stderr,none": 0.01996610354027947,
      "alias": "openbookqa"
    },
    "harness|boolq|0": {
      "acc,none": 0.617737003058104,
      "acc_stderr,none": 0.00849914969044927,
      "alias": "boolq"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.2586138219478192,
      "acc_stderr,none": 0.004369780529824002,
      "acc_norm,none": 0.2726548496315475,
      "acc_norm_stderr,none": 0.004444146875436291,
      "alias": "hellaswag"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.24969400244798043,
      "acc_stderr,none": 0.015152286907148125,
      "alias": "truthfulqa_mc1"
    }
  },
  "task_info": {
    "model": "google/gemma-7b-it",
    "revision": "main",
    "private": false,
    "params": 8.54,
    "architectures": "GemmaForCausalLM",
    "quant_type": null,
    "precision": "16bit",
    "model_params": 8.54,
    "model_size": 17.08,
    "weight_dtype": "bfloat16",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-04-27T08:04:58Z",
    "model_type": "original",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": null,
  "versions": {
    "harness|arc:easy|0": 1.0,
    "harness|piqa|0": 1.0,
    "harness|lambada:openai|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|arc:challenge|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|hellaswag|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1716014497.6343136,
  "config": {
    "model": "hf",
    "model_args": "pretrained=gemma-7b-it,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 4,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
lodrick-the-lafted/results_2024-05-18-13-49-24_Olethros-8B.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "lodrick-the-lafted/Olethros-8B",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 16.06,
+        "model_params": 8.03,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|openbookqa|0": {
+            "acc,none": 0.342,
+            "acc_stderr,none": 0.02123614719989926,
+            "acc_norm,none": 0.452,
+            "acc_norm_stderr,none": 0.02227969410784342,
+            "alias": "openbookqa"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5983867755427206,
+            "acc_stderr,none": 0.004892226011836588,
+            "acc_norm,none": 0.7917745469030074,
+            "acc_norm_stderr,none": 0.004052091024041559,
+            "alias": "hellaswag"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7916213275299239,
+            "acc_stderr,none": 0.009476125383049453,
+            "acc_norm,none": 0.8041349292709467,
+            "acc_norm_stderr,none": 0.009259518041395772,
+            "alias": "piqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5307167235494881,
+            "acc_stderr,none": 0.014583792546304038,
+            "acc_norm,none": 0.5631399317406144,
+            "acc_norm_stderr,none": 0.014494421584256525,
+            "alias": "arc_challenge"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6441390115368181,
+            "acc_stderr,none": 0.003776253514007323,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5723698193411264,
+            "acc_stderr,none": 0.0066380361140532754
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.5079365079365079,
+            "acc_stderr,none": 0.044715725362943486
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7212121212121212,
+            "acc_stderr,none": 0.03501438706296781
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8578431372549019,
+            "acc_stderr,none": 0.024509803921568624
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8607594936708861,
+            "acc_stderr,none": 0.022535526352692712
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7768595041322314,
+            "acc_stderr,none": 0.03800754475228733
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7407407407407407,
+            "acc_stderr,none": 0.042365112580946336
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7791411042944786,
+            "acc_stderr,none": 0.032591773927421776
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.7167630057803468,
+            "acc_stderr,none": 0.024257901705323374
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2759776536312849,
+            "acc_stderr,none": 0.014950103002475349
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.7202572347266881,
+            "acc_stderr,none": 0.0254942593506949
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7345679012345679,
+            "acc_stderr,none": 0.02456922360046085
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.47979139504563234,
+            "acc_stderr,none": 0.01275980142776756
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.8011695906432749,
+            "acc_stderr,none": 0.03061111655743253
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.7325394271000966,
+            "acc_stderr,none": 0.007648269910880707
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252607
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.7320754716981132,
+            "acc_stderr,none": 0.027257260322494845
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6242774566473989,
+            "acc_stderr,none": 0.036928207672648664
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.42,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.7219730941704036,
+            "acc_stderr,none": 0.030069584874494033
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8543689320388349,
+            "acc_stderr,none": 0.03492606476623789
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8888888888888888,
+            "acc_stderr,none": 0.020588491316092375
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.81,
+            "acc_stderr,none": 0.03942772444036623
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.8352490421455939,
+            "acc_stderr,none": 0.013265346261323785
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7581699346405228,
+            "acc_stderr,none": 0.024518195641879334
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5212765957446809,
+            "acc_stderr,none": 0.029800481645628693
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.7426470588235294,
+            "acc_stderr,none": 0.026556519470041524
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5542168674698795,
+            "acc_stderr,none": 0.03869543323472101
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7517062073448164,
+            "acc_stderr,none": 0.00761596873245451
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.4824561403508772,
+            "acc_stderr,none": 0.04700708033551038
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.8282828282828283,
+            "acc_stderr,none": 0.026869716187429917
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8860103626943006,
+            "acc_stderr,none": 0.022935144053919422
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6717948717948717,
+            "acc_stderr,none": 0.023807633198657266
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.7689075630252101,
+            "acc_stderr,none": 0.027381406927868897
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8330275229357799,
+            "acc_stderr,none": 0.015990154885073406
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7633587786259542,
+            "acc_stderr,none": 0.03727673575596915
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6633986928104575,
+            "acc_stderr,none": 0.01911721391149515
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6909090909090909,
+            "acc_stderr,none": 0.044262946482000985
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7591836734693878,
+            "acc_stderr,none": 0.027372942201788167
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8407960199004975,
+            "acc_stderr,none": 0.02587064676616914
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.87,
+            "acc_stderr,none": 0.03379976689896309
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5591500158579131,
+            "acc_stderr,none": 0.00852487318530932
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709391
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6518518518518519,
+            "acc_stderr,none": 0.041153246103369526
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7105263157894737,
+            "acc_stderr,none": 0.036906779861372814
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7430555555555556,
+            "acc_stderr,none": 0.03653946969442099
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.46,
+            "acc_stderr,none": 0.05009082659620333
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.47,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.047937248544110196
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.46078431372549017,
+            "acc_stderr,none": 0.04959859966384181
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.8,
+            "acc_stderr,none": 0.04020151261036846
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5574468085106383,
+            "acc_stderr,none": 0.032469569197899575
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.6068965517241379,
+            "acc_stderr,none": 0.040703290137070705
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.48412698412698413,
+            "acc_stderr,none": 0.02573833063941215
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7903225806451613,
+            "acc_stderr,none": 0.023157879349083515
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.5172413793103449,
+            "acc_stderr,none": 0.03515895551165698
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.73,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3925925925925926,
+            "acc_stderr,none": 0.02977384701253297
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.4304635761589404,
+            "acc_stderr,none": 0.04042809961395634
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.034099716973523674
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5714285714285714,
+            "acc_stderr,none": 0.04697113923010213
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8379204892966361,
+            "acc_stderr,none": 0.0064455206371826935,
+            "alias": "boolq"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7332280978689818,
+            "acc_stderr,none": 0.012430046102144335,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.535143071467598,
+            "perplexity_stderr,none": 0.07814219566623416,
+            "acc,none": 0.7085193091403066,
+            "acc_stderr,none": 0.006331298844115851,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5156218237914109,
+            "acc_stderr,none": 0.014751538668557534,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8211279461279462,
+            "acc_stderr,none": 0.007864024474332735,
+            "acc_norm,none": 0.8118686868686869,
+            "acc_norm_stderr,none": 0.008019395492398136,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.35495716034271724,
+            "acc_stderr,none": 0.016750862381375898,
+            "alias": "truthfulqa_mc1"
+        }
+    },
+    "task_info": {
+        "model": "lodrick-the-lafted/Olethros-8B",
+        "revision": "main",
+        "private": false,
+        "params": 8.03,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 8.03,
+        "model_size": 16.06,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|openbookqa|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|boolq|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716007355.3824885,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=lodrick-the-lafted/Olethros-8B,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
meta-llama/results_2024-05-18-11-57-40_llama3_8b_instruct-chat.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "meta-llama/Meta-Llama-3-8B-Instruct",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": "16.06",
+        "model_params": "8.03",
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.516537054473661,
+            "acc_stderr,none": 0.015198101439639141,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8169191919191919,
+            "acc_stderr,none": 0.007935588746593822,
+            "acc_norm,none": 0.7975589225589226,
+            "acc_norm_stderr,none": 0.008245156475629189,
+            "alias": "arc_easy"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.342,
+            "acc_stderr,none": 0.02123614719989926,
+            "acc_norm,none": 0.432,
+            "acc_norm_stderr,none": 0.02217510926561316,
+            "alias": "openbookqa"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5763792073292173,
+            "acc_stderr,none": 0.004931219148182254,
+            "acc_norm,none": 0.7583150766779526,
+            "acc_norm_stderr,none": 0.0042722941002326644,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7205998421468035,
+            "acc_stderr,none": 0.012610826539404686,
+            "alias": "winogrande"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6389403218914684,
+            "acc_stderr,none": 0.0038309470916819913,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5808714133900106,
+            "acc_stderr,none": 0.006755881132684596
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.49206349206349204,
+            "acc_stderr,none": 0.044715725362943486
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7393939393939394,
+            "acc_stderr,none": 0.034277431758165236
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8382352941176471,
+            "acc_stderr,none": 0.025845017986926924
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8312236286919831,
+            "acc_stderr,none": 0.02438140683258623
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.768595041322314,
+            "acc_stderr,none": 0.03849856098794088
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7685185185185185,
+            "acc_stderr,none": 0.04077494709252627
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.754601226993865,
+            "acc_stderr,none": 0.03380939813943354
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.684971098265896,
+            "acc_stderr,none": 0.025009313790069723
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.3307262569832402,
+            "acc_stderr,none": 0.01573502625896612
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.7106109324758842,
+            "acc_stderr,none": 0.025755865922632924
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7407407407407407,
+            "acc_stderr,none": 0.02438366553103545
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.49282920469361147,
+            "acc_stderr,none": 0.012768922739553313
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7719298245614035,
+            "acc_stderr,none": 0.03218093795602357
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.718056002574831,
+            "acc_stderr,none": 0.007781916153013798
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.7094339622641509,
+            "acc_stderr,none": 0.027943219989337128
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.653179190751445,
+            "acc_stderr,none": 0.036291466701596636
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.04852365870939098
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6905829596412556,
+            "acc_stderr,none": 0.03102441174057221
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8155339805825242,
+            "acc_stderr,none": 0.03840423627288276
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8888888888888888,
+            "acc_stderr,none": 0.020588491316092368
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.82,
+            "acc_stderr,none": 0.03861229196653695
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.80970625798212,
+            "acc_stderr,none": 0.01403694585038139
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7450980392156863,
+            "acc_stderr,none": 0.024954184324879912
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.524822695035461,
+            "acc_stderr,none": 0.02979071924382972
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.026303648393696036
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5120481927710844,
+            "acc_stderr,none": 0.03891364495835817
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7416314592135197,
+            "acc_stderr,none": 0.007737195831875021
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.047036043419179864
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7777777777777778,
+            "acc_stderr,none": 0.02962022787479047
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8704663212435233,
+            "acc_stderr,none": 0.024233532297758712
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6461538461538462,
+            "acc_stderr,none": 0.024243783994062164
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.7310924369747899,
+            "acc_stderr,none": 0.028801392193631276
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8238532110091743,
+            "acc_stderr,none": 0.016332882393431385
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7709923664122137,
+            "acc_stderr,none": 0.036853466317118506
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6764705882352942,
+            "acc_stderr,none": 0.018926082916083383
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6636363636363637,
+            "acc_stderr,none": 0.04525393596302505
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7387755102040816,
+            "acc_stderr,none": 0.02812342933514279
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8606965174129353,
+            "acc_stderr,none": 0.024484487162913973
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.86,
+            "acc_stderr,none": 0.03487350880197769
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5474151601649223,
+            "acc_stderr,none": 0.008581134442063206
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6222222222222222,
+            "acc_stderr,none": 0.04188307537595853
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7039473684210527,
+            "acc_stderr,none": 0.03715062154998904
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7361111111111112,
+            "acc_stderr,none": 0.03685651095897532
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.53,
+            "acc_stderr,none": 0.050161355804659205
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.04793724854411019
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.04975185951049946
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.74,
+            "acc_stderr,none": 0.044084400227680794
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.548936170212766,
+            "acc_stderr,none": 0.032529096196131965
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.6413793103448275,
+            "acc_stderr,none": 0.03996629574876719
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.47619047619047616,
+            "acc_stderr,none": 0.025722097064388525
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7645161290322581,
+            "acc_stderr,none": 0.024137632429337703
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.45320197044334976,
+            "acc_stderr,none": 0.03502544650845872
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.69,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.37777777777777777,
+            "acc_stderr,none": 0.02956070739246571
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.4966887417218543,
+            "acc_stderr,none": 0.04082393379449654
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.49537037037037035,
+            "acc_stderr,none": 0.03409825519163572
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.4732142857142857,
+            "acc_stderr,none": 0.047389751192741546
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5298634812286689,
+            "acc_stderr,none": 0.014585305840007102,
+            "acc_norm,none": 0.5674061433447098,
+            "acc_norm_stderr,none": 0.014478005694182533,
+            "alias": "arc_challenge"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3623011015911873,
+            "acc_stderr,none": 0.016826646897262258,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.1027577586868635,
+            "perplexity_stderr,none": 0.07658028231054229,
+            "acc,none": 0.7224917523772559,
+            "acc_stderr,none": 0.006238303031556425,
+            "alias": "lambada_openai"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8308868501529052,
+            "acc_stderr,none": 0.006556199674684511,
+            "alias": "boolq"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7867247007616975,
+            "acc_stderr,none": 0.009557121225861331,
+            "acc_norm,none": 0.7850924918389554,
+            "acc_norm_stderr,none": 0.009583665082653313,
+            "alias": "piqa"
+        }
+    },
+    "task_info": {
+        "model": "meta-llama/Meta-Llama-3-8B-Instruct",
+        "revision": "main",
+        "private": false,
+        "params": 8.03,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 8.03,
+        "model_size": 16.06,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|piqa|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716002903.7340682,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=llama3_8b_instruct-chat,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
meta-llama/results_2024-05-21-09-49-00_Llama-2-7b-chat-hf.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "meta-llama/Llama-2-7b-chat-hf",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": "13.48",
+        "model_params": "6.74",
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.6629834254143646,
+            "acc_stderr,none": 0.013284955769395252,
+            "alias": "winogrande"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5778729336785501,
+            "acc_stderr,none": 0.004928891895874301,
+            "acc_norm,none": 0.7549292969527982,
+            "acc_norm_stderr,none": 0.004292500501716266,
+            "alias": "hellaswag"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.4641076769690927,
+            "acc_stderr,none": 0.004034093349564484,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.43209351753453773,
+            "acc_stderr,none": 0.006925999428653776
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.25396825396825395,
+            "acc_stderr,none": 0.03893259610604674
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.5757575757575758,
+            "acc_stderr,none": 0.03859268142070262
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.6372549019607843,
+            "acc_stderr,none": 0.03374499356319355
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.6160337552742616,
+            "acc_stderr,none": 0.031658678064106674
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.5867768595041323,
+            "acc_stderr,none": 0.04495087843548408
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.5648148148148148,
+            "acc_stderr,none": 0.04792898170907061
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.5766871165644172,
+            "acc_stderr,none": 0.03881891213334383
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.026919095102908273
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2424581005586592,
+            "acc_stderr,none": 0.014333522059217887
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.5434083601286174,
+            "acc_stderr,none": 0.028290869054197598
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.5555555555555556,
+            "acc_stderr,none": 0.027648477877413324
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.3559322033898305,
+            "acc_stderr,none": 0.012228645537277568
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.695906432748538,
+            "acc_stderr,none": 0.03528211258245232
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.548760862568394,
+            "acc_stderr,none": 0.008640433171174668
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.48,
+            "acc_stderr,none": 0.050211673156867795
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.5471698113207547,
+            "acc_stderr,none": 0.030635627957961813
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.37572254335260113,
+            "acc_stderr,none": 0.03692820767264867
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709390974
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.5739910313901345,
+            "acc_stderr,none": 0.033188332862172806
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.6699029126213593,
+            "acc_stderr,none": 0.0465614711001235
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7435897435897436,
+            "acc_stderr,none": 0.028605953702004253
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.47,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.6909323116219668,
+            "acc_stderr,none": 0.016524988919702194
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5065359477124183,
+            "acc_stderr,none": 0.028627470550556047
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.35815602836879434,
+            "acc_stderr,none": 0.02860208586275942
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.4227941176470588,
+            "acc_stderr,none": 0.03000856284500348
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4819277108433735,
+            "acc_stderr,none": 0.03889951252827216
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.5300617484562886,
+            "acc_stderr,none": 0.008753797337142028
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2894736842105263,
+            "acc_stderr,none": 0.04266339443159394
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.5909090909090909,
+            "acc_stderr,none": 0.035029757994130065
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.6787564766839378,
+            "acc_stderr,none": 0.033699508685490674
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.41025641025641024,
+            "acc_stderr,none": 0.02493931390694079
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.36134453781512604,
+            "acc_stderr,none": 0.031204691225150023
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.6275229357798165,
|
211 |
+
"acc_stderr,none": 0.020728368457638494
|
212 |
+
},
|
213 |
+
"harness|mmlu_human_sexuality|0": {
|
214 |
+
"alias": " - human_sexuality",
|
215 |
+
"acc,none": 0.5801526717557252,
|
216 |
+
"acc_stderr,none": 0.043285772152629735
|
217 |
+
},
|
218 |
+
"harness|mmlu_professional_psychology|0": {
|
219 |
+
"alias": " - professional_psychology",
|
220 |
+
"acc,none": 0.4624183006535948,
|
221 |
+
"acc_stderr,none": 0.02017061497496977
|
222 |
+
},
|
223 |
+
"harness|mmlu_public_relations|0": {
|
224 |
+
"alias": " - public_relations",
|
225 |
+
"acc,none": 0.5454545454545454,
|
226 |
+
"acc_stderr,none": 0.04769300568972745
|
227 |
+
},
|
228 |
+
"harness|mmlu_security_studies|0": {
|
229 |
+
"alias": " - security_studies",
|
230 |
+
"acc,none": 0.5102040816326531,
|
231 |
+
"acc_stderr,none": 0.03200255347893782
|
232 |
+
},
|
233 |
+
"harness|mmlu_sociology|0": {
|
234 |
+
"alias": " - sociology",
|
235 |
+
"acc,none": 0.7313432835820896,
|
236 |
+
"acc_stderr,none": 0.03134328358208954
|
237 |
+
},
|
238 |
+
"harness|mmlu_us_foreign_policy|0": {
|
239 |
+
"alias": " - us_foreign_policy",
|
240 |
+
"acc,none": 0.71,
|
241 |
+
"acc_stderr,none": 0.045604802157206845
|
242 |
+
},
|
243 |
+
"harness|mmlu_stem|0": {
|
244 |
+
"alias": " - stem",
|
245 |
+
"acc,none": 0.3640976847446876,
|
246 |
+
"acc_stderr,none": 0.00839563704312517
|
247 |
+
},
|
248 |
+
"harness|mmlu_abstract_algebra|0": {
|
249 |
+
"alias": " - abstract_algebra",
|
250 |
+
"acc,none": 0.29,
|
251 |
+
"acc_stderr,none": 0.04560480215720683
|
252 |
+
},
|
253 |
+
"harness|mmlu_anatomy|0": {
|
254 |
+
"alias": " - anatomy",
|
255 |
+
"acc,none": 0.4666666666666667,
|
256 |
+
"acc_stderr,none": 0.043097329010363554
|
257 |
+
},
|
258 |
+
"harness|mmlu_astronomy|0": {
|
259 |
+
"alias": " - astronomy",
|
260 |
+
"acc,none": 0.47368421052631576,
|
261 |
+
"acc_stderr,none": 0.04063302731486671
|
262 |
+
},
|
263 |
+
"harness|mmlu_college_biology|0": {
|
264 |
+
"alias": " - college_biology",
|
265 |
+
"acc,none": 0.4583333333333333,
|
266 |
+
"acc_stderr,none": 0.04166666666666665
|
267 |
+
},
|
268 |
+
"harness|mmlu_college_chemistry|0": {
|
269 |
+
"alias": " - college_chemistry",
|
270 |
+
"acc,none": 0.27,
|
271 |
+
"acc_stderr,none": 0.044619604333847415
|
272 |
+
},
|
273 |
+
"harness|mmlu_college_computer_science|0": {
|
274 |
+
"alias": " - college_computer_science",
|
275 |
+
"acc,none": 0.34,
|
276 |
+
"acc_stderr,none": 0.04760952285695236
|
277 |
+
},
|
278 |
+
"harness|mmlu_college_mathematics|0": {
|
279 |
+
"alias": " - college_mathematics",
|
280 |
+
"acc,none": 0.33,
|
281 |
+
"acc_stderr,none": 0.04725815626252605
|
282 |
+
},
|
283 |
+
"harness|mmlu_college_physics|0": {
|
284 |
+
"alias": " - college_physics",
|
285 |
+
"acc,none": 0.18627450980392157,
|
286 |
+
"acc_stderr,none": 0.03873958714149353
|
287 |
+
},
|
288 |
+
"harness|mmlu_computer_security|0": {
|
289 |
+
"alias": " - computer_security",
|
290 |
+
"acc,none": 0.6,
|
291 |
+
"acc_stderr,none": 0.04923659639173309
|
292 |
+
},
|
293 |
+
"harness|mmlu_conceptual_physics|0": {
|
294 |
+
"alias": " - conceptual_physics",
|
295 |
+
"acc,none": 0.39574468085106385,
|
296 |
+
"acc_stderr,none": 0.03196758697835362
|
297 |
+
},
|
298 |
+
"harness|mmlu_electrical_engineering|0": {
|
299 |
+
"alias": " - electrical_engineering",
|
300 |
+
"acc,none": 0.4482758620689655,
|
301 |
+
"acc_stderr,none": 0.04144311810878151
|
302 |
+
},
|
303 |
+
"harness|mmlu_elementary_mathematics|0": {
|
304 |
+
"alias": " - elementary_mathematics",
|
305 |
+
"acc,none": 0.2804232804232804,
|
306 |
+
"acc_stderr,none": 0.02313528797432562
|
307 |
+
},
|
308 |
+
"harness|mmlu_high_school_biology|0": {
|
309 |
+
"alias": " - high_school_biology",
|
310 |
+
"acc,none": 0.5258064516129032,
|
311 |
+
"acc_stderr,none": 0.02840609505765332
|
312 |
+
},
|
313 |
+
"harness|mmlu_high_school_chemistry|0": {
|
314 |
+
"alias": " - high_school_chemistry",
|
315 |
+
"acc,none": 0.3448275862068966,
|
316 |
+
"acc_stderr,none": 0.03344283744280458
|
317 |
+
},
|
318 |
+
"harness|mmlu_high_school_computer_science|0": {
|
319 |
+
"alias": " - high_school_computer_science",
|
320 |
+
"acc,none": 0.41,
|
321 |
+
"acc_stderr,none": 0.049431107042371025
|
322 |
+
},
|
323 |
+
"harness|mmlu_high_school_mathematics|0": {
|
324 |
+
"alias": " - high_school_mathematics",
|
325 |
+
"acc,none": 0.26296296296296295,
|
326 |
+
"acc_stderr,none": 0.02684205787383371
|
327 |
+
},
|
328 |
+
"harness|mmlu_high_school_physics|0": {
|
329 |
+
"alias": " - high_school_physics",
|
330 |
+
"acc,none": 0.2913907284768212,
|
331 |
+
"acc_stderr,none": 0.03710185726119994
|
332 |
+
},
|
333 |
+
"harness|mmlu_high_school_statistics|0": {
|
334 |
+
"alias": " - high_school_statistics",
|
335 |
+
"acc,none": 0.25462962962962965,
|
336 |
+
"acc_stderr,none": 0.02971127586000537
|
337 |
+
},
|
338 |
+
"harness|mmlu_machine_learning|0": {
|
339 |
+
"alias": " - machine_learning",
|
340 |
+
"acc,none": 0.33035714285714285,
|
341 |
+
"acc_stderr,none": 0.04464285714285713
|
342 |
+
},
|
343 |
+
"harness|arc:easy|0": {
|
344 |
+
"acc,none": 0.7390572390572391,
|
345 |
+
"acc_stderr,none": 0.009011142493235974,
|
346 |
+
"acc_norm,none": 0.696969696969697,
|
347 |
+
"acc_norm_stderr,none": 0.009430140669278957,
|
348 |
+
"alias": "arc_easy"
|
349 |
+
},
|
350 |
+
"harness|boolq|0": {
|
351 |
+
"acc,none": 0.7978593272171254,
|
352 |
+
"acc_stderr,none": 0.007023968517730715,
|
353 |
+
"alias": "boolq"
|
354 |
+
},
|
355 |
+
"harness|lambada:openai|0": {
|
356 |
+
"perplexity,none": 3.262453855478333,
|
357 |
+
"perplexity_stderr,none": 0.0865842236886026,
|
358 |
+
"acc,none": 0.710071802833301,
|
359 |
+
"acc_stderr,none": 0.00632132957685722,
|
360 |
+
"alias": "lambada_openai"
|
361 |
+
},
|
362 |
+
"harness|truthfulqa:mc1|0": {
|
363 |
+
"acc,none": 0.30354957160342716,
|
364 |
+
"acc_stderr,none": 0.016095884155386847,
|
365 |
+
"alias": "truthfulqa_mc1"
|
366 |
+
},
|
367 |
+
"harness|openbookqa|0": {
|
368 |
+
"acc,none": 0.336,
|
369 |
+
"acc_stderr,none": 0.021144791425048853,
|
370 |
+
"acc_norm,none": 0.436,
|
371 |
+
"acc_norm_stderr,none": 0.0221989546414768,
|
372 |
+
"alias": "openbookqa"
|
373 |
+
},
|
374 |
+
"harness|piqa|0": {
|
375 |
+
"acc,none": 0.764417845484222,
|
376 |
+
"acc_stderr,none": 0.009901067586473909,
|
377 |
+
"acc_norm,none": 0.7725788900979326,
|
378 |
+
"acc_norm_stderr,none": 0.009779850767847263,
|
379 |
+
"alias": "piqa"
|
380 |
+
},
|
381 |
+
"harness|truthfulqa:mc2|0": {
|
382 |
+
"acc,none": 0.45319051806258964,
|
383 |
+
"acc_stderr,none": 0.015638549870558044,
|
384 |
+
"alias": "truthfulqa_mc2"
|
385 |
+
},
|
386 |
+
"harness|arc:challenge|0": {
|
387 |
+
"acc,none": 0.44112627986348124,
|
388 |
+
"acc_stderr,none": 0.014509747749064663,
|
389 |
+
"acc_norm,none": 0.44197952218430037,
|
390 |
+
"acc_norm_stderr,none": 0.014512682523128343,
|
391 |
+
"alias": "arc_challenge"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "meta-llama/Llama-2-7b-chat-hf",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 6.74,
|
399 |
+
"architectures": "LlamaForCausalLM",
|
400 |
+
"quant_type": null,
|
401 |
+
"precision": "16bit",
|
402 |
+
"model_params": 6.74,
|
403 |
+
"model_size": 13.48,
|
404 |
+
"weight_dtype": "float16",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-27T08:04:58Z",
|
410 |
+
"model_type": "original",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": null,
|
416 |
+
"versions": {
|
417 |
+
"harness|winogrande|0": 1.0,
|
418 |
+
"harness|hellaswag|0": 1.0,
|
419 |
+
"harness|mmlu|0": null,
|
420 |
+
"harness|mmlu_humanities|0": null,
|
421 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
422 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
423 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
424 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
425 |
+
"harness|mmlu_international_law|0": 0.0,
|
426 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
427 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
428 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
429 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
430 |
+
"harness|mmlu_philosophy|0": 0.0,
|
431 |
+
"harness|mmlu_prehistory|0": 0.0,
|
432 |
+
"harness|mmlu_professional_law|0": 0.0,
|
433 |
+
"harness|mmlu_world_religions|0": 0.0,
|
434 |
+
"harness|mmlu_other|0": null,
|
435 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
436 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
437 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
438 |
+
"harness|mmlu_global_facts|0": 0.0,
|
439 |
+
"harness|mmlu_human_aging|0": 0.0,
|
440 |
+
"harness|mmlu_management|0": 0.0,
|
441 |
+
"harness|mmlu_marketing|0": 0.0,
|
442 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
443 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
444 |
+
"harness|mmlu_nutrition|0": 0.0,
|
445 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
446 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
447 |
+
"harness|mmlu_virology|0": 0.0,
|
448 |
+
"harness|mmlu_social_sciences|0": null,
|
449 |
+
"harness|mmlu_econometrics|0": 0.0,
|
450 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
451 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
452 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
453 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
454 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
455 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
456 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
457 |
+
"harness|mmlu_public_relations|0": 0.0,
|
458 |
+
"harness|mmlu_security_studies|0": 0.0,
|
459 |
+
"harness|mmlu_sociology|0": 0.0,
|
460 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
461 |
+
"harness|mmlu_stem|0": null,
|
462 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
463 |
+
"harness|mmlu_anatomy|0": 0.0,
|
464 |
+
"harness|mmlu_astronomy|0": 0.0,
|
465 |
+
"harness|mmlu_college_biology|0": 0.0,
|
466 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
467 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
468 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
469 |
+
"harness|mmlu_college_physics|0": 0.0,
|
470 |
+
"harness|mmlu_computer_security|0": 0.0,
|
471 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
472 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
473 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
478 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
479 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
480 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
481 |
+
"harness|arc:easy|0": 1.0,
|
482 |
+
"harness|boolq|0": 2.0,
|
483 |
+
"harness|lambada:openai|0": 1.0,
|
484 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
485 |
+
"harness|openbookqa|0": 1.0,
|
486 |
+
"harness|piqa|0": 1.0,
|
487 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
488 |
+
"harness|arc:challenge|0": 1.0
|
489 |
+
},
|
490 |
+
"n-shot": {
|
491 |
+
"arc_challenge": 0,
|
492 |
+
"arc_easy": 0,
|
493 |
+
"boolq": 0,
|
494 |
+
"hellaswag": 0,
|
495 |
+
"lambada_openai": 0,
|
496 |
+
"mmlu": 0,
|
497 |
+
"mmlu_abstract_algebra": 0,
|
498 |
+
"mmlu_anatomy": 0,
|
499 |
+
"mmlu_astronomy": 0,
|
500 |
+
"mmlu_business_ethics": 0,
|
501 |
+
"mmlu_clinical_knowledge": 0,
|
502 |
+
"mmlu_college_biology": 0,
|
503 |
+
"mmlu_college_chemistry": 0,
|
504 |
+
"mmlu_college_computer_science": 0,
|
505 |
+
"mmlu_college_mathematics": 0,
|
506 |
+
"mmlu_college_medicine": 0,
|
507 |
+
"mmlu_college_physics": 0,
|
508 |
+
"mmlu_computer_security": 0,
|
509 |
+
"mmlu_conceptual_physics": 0,
|
510 |
+
"mmlu_econometrics": 0,
|
511 |
+
"mmlu_electrical_engineering": 0,
|
512 |
+
"mmlu_elementary_mathematics": 0,
|
513 |
+
"mmlu_formal_logic": 0,
|
514 |
+
"mmlu_global_facts": 0,
|
515 |
+
"mmlu_high_school_biology": 0,
|
516 |
+
"mmlu_high_school_chemistry": 0,
|
517 |
+
"mmlu_high_school_computer_science": 0,
|
518 |
+
"mmlu_high_school_european_history": 0,
|
519 |
+
"mmlu_high_school_geography": 0,
|
520 |
+
"mmlu_high_school_government_and_politics": 0,
|
521 |
+
"mmlu_high_school_macroeconomics": 0,
|
522 |
+
"mmlu_high_school_mathematics": 0,
|
523 |
+
"mmlu_high_school_microeconomics": 0,
|
524 |
+
"mmlu_high_school_physics": 0,
|
525 |
+
"mmlu_high_school_psychology": 0,
|
526 |
+
"mmlu_high_school_statistics": 0,
|
527 |
+
"mmlu_high_school_us_history": 0,
|
528 |
+
"mmlu_high_school_world_history": 0,
|
529 |
+
"mmlu_human_aging": 0,
|
530 |
+
"mmlu_human_sexuality": 0,
|
531 |
+
"mmlu_humanities": 0,
|
532 |
+
"mmlu_international_law": 0,
|
533 |
+
"mmlu_jurisprudence": 0,
|
534 |
+
"mmlu_logical_fallacies": 0,
|
535 |
+
"mmlu_machine_learning": 0,
|
536 |
+
"mmlu_management": 0,
|
537 |
+
"mmlu_marketing": 0,
|
538 |
+
"mmlu_medical_genetics": 0,
|
539 |
+
"mmlu_miscellaneous": 0,
|
540 |
+
"mmlu_moral_disputes": 0,
|
541 |
+
"mmlu_moral_scenarios": 0,
|
542 |
+
"mmlu_nutrition": 0,
|
543 |
+
"mmlu_other": 0,
|
544 |
+
"mmlu_philosophy": 0,
|
545 |
+
"mmlu_prehistory": 0,
|
546 |
+
"mmlu_professional_accounting": 0,
|
547 |
+
"mmlu_professional_law": 0,
|
548 |
+
"mmlu_professional_medicine": 0,
|
549 |
+
"mmlu_professional_psychology": 0,
|
550 |
+
"mmlu_public_relations": 0,
|
551 |
+
"mmlu_security_studies": 0,
|
552 |
+
"mmlu_social_sciences": 0,
|
553 |
+
"mmlu_sociology": 0,
|
554 |
+
"mmlu_stem": 0,
|
555 |
+
"mmlu_us_foreign_policy": 0,
|
556 |
+
"mmlu_virology": 0,
|
557 |
+
"mmlu_world_religions": 0,
|
558 |
+
"openbookqa": 0,
|
559 |
+
"piqa": 0,
|
560 |
+
"truthfulqa_mc1": 0,
|
561 |
+
"truthfulqa_mc2": 0,
|
562 |
+
"winogrande": 0
|
563 |
+
},
|
564 |
+
"date": 1716254430.0829494,
|
565 |
+
"config": {
|
566 |
+
"model": "hf",
|
567 |
+
"model_args": "pretrained=Llama-2-7b-chat-hf,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
568 |
+
"batch_size": 4,
|
569 |
+
"batch_sizes": [],
|
570 |
+
"device": "cuda",
|
571 |
+
"use_cache": null,
|
572 |
+
"limit": null,
|
573 |
+
"bootstrap_iters": 100000,
|
574 |
+
"gen_kwargs": null
|
575 |
+
}
|
576 |
+
}
|
microsoft/results_2024-05-17-23-28-23_Phi-3-mini-4k-instruct.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "microsoft/Phi-3-mini-4k-instruct",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": "7.64",
+        "model_params": "3.82",
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.39167686658506734,
+            "acc_stderr,none": 0.01708779588176964,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8025027203482046,
+            "acc_stderr,none": 0.009288578108523264,
+            "acc_norm,none": 0.8035908596300326,
+            "acc_norm_stderr,none": 0.009269232237679946,
+            "alias": "piqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5546075085324232,
+            "acc_stderr,none": 0.014523987638344081,
+            "acc_norm,none": 0.575938566552901,
+            "acc_norm_stderr,none": 0.0144418896274644,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 4.167888136759857,
+            "perplexity_stderr,none": 0.09867194198807626,
+            "acc,none": 0.6834853483407723,
+            "acc_stderr,none": 0.006479978824925173,
+            "alias": "lambada_openai"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.604959171479785,
+            "acc_stderr,none": 0.004878603699686044,
+            "acc_norm,none": 0.7844054969129656,
+            "acc_norm_stderr,none": 0.004103936879526393,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8324915824915825,
+            "acc_stderr,none": 0.007662604959624314,
+            "acc_norm,none": 0.8017676767676768,
+            "acc_norm_stderr,none": 0.008180497199102571,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5777764270437843,
+            "acc_stderr,none": 0.015365546097507347,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7363851617995264,
+            "acc_stderr,none": 0.012382849299658464,
+            "alias": "winogrande"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8620795107033639,
+            "acc_stderr,none": 0.006030878733298393,
+            "alias": "boolq"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6794616151545364,
+            "acc_stderr,none": 0.003742559627188732,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.6410201912858661,
+            "acc_stderr,none": 0.006733882452592044
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.5396825396825397,
+            "acc_stderr,none": 0.04458029125470973
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.8121212121212121,
+            "acc_stderr,none": 0.03050193405942914
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7990196078431373,
+            "acc_stderr,none": 0.028125972265654362
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.810126582278481,
+            "acc_stderr,none": 0.025530100460233494
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.8347107438016529,
+            "acc_stderr,none": 0.03390780612972776
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7685185185185185,
+            "acc_stderr,none": 0.04077494709252626
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7975460122699386,
+            "acc_stderr,none": 0.03157065078911899
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.7398843930635838,
+            "acc_stderr,none": 0.02361867831006937
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.5418994413407822,
+            "acc_stderr,none": 0.016663683295020534
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.7427652733118971,
+            "acc_stderr,none": 0.024826171289250885
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7530864197530864,
+            "acc_stderr,none": 0.02399350170904211
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.5104302477183833,
+            "acc_stderr,none": 0.012767457253930655
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.8538011695906432,
+            "acc_stderr,none": 0.027097290118070806
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.7183778564531703,
+            "acc_stderr,none": 0.007754694005515526
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.7622641509433963,
+            "acc_stderr,none": 0.026199808807561925
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6589595375722543,
+            "acc_stderr,none": 0.036146654241808254
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695236
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6681614349775785,
+            "acc_stderr,none": 0.03160295143776679
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8155339805825242,
+            "acc_stderr,none": 0.03840423627288276
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.905982905982906,
+            "acc_stderr,none": 0.019119892798924964
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.73,
+            "acc_stderr,none": 0.0446196043338474
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.8237547892720306,
+            "acc_stderr,none": 0.013625556907993478
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7124183006535948,
+            "acc_stderr,none": 0.02591780611714716
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5425531914893617,
+            "acc_stderr,none": 0.029719281272236848
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.7279411764705882,
+            "acc_stderr,none": 0.02703304115168146
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5060240963855421,
+            "acc_stderr,none": 0.03892212195333045
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7949301267468313,
+            "acc_stderr,none": 0.007135407179740199
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.5350877192982456,
+            "acc_stderr,none": 0.046920083813689104
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.8484848484848485,
+            "acc_stderr,none": 0.02554565042660363
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8860103626943006,
+            "acc_stderr,none": 0.02293514405391943
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.7333333333333333,
+            "acc_stderr,none": 0.022421273612923724
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.8235294117647058,
+            "acc_stderr,none": 0.024762902678057936
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8917431192660551,
+            "acc_stderr,none": 0.01332134844761172
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7557251908396947,
+            "acc_stderr,none": 0.037683359597287455
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.7369281045751634,
+            "acc_stderr,none": 0.017812676542320657
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.7090909090909091,
+            "acc_stderr,none": 0.04350271442923243
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7714285714285715,
+            "acc_stderr,none": 0.026882144922307748
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8706467661691543,
+            "acc_stderr,none": 0.023729830881018522
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.86,
+            "acc_stderr,none": 0.03487350880197771
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5857913098636219,
+            "acc_stderr,none": 0.008363844540281768
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6444444444444445,
+            "acc_stderr,none": 0.04135176749720386
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7631578947368421,
+            "acc_stderr,none": 0.03459777606810536
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.8333333333333334,
+            "acc_stderr,none": 0.031164899666948614
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.050251890762960605
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.52,
+            "acc_stderr,none": 0.050211673156867795
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.04688261722621505
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.38235294117647056,
+            "acc_stderr,none": 0.04835503696107223
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.73,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.6680851063829787,
+            "acc_stderr,none": 0.030783736757745647
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.6137931034482759,
+            "acc_stderr,none": 0.04057324734419035
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.5105820105820106,
+            "acc_stderr,none": 0.025745542276045478
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.8354838709677419,
+            "acc_stderr,none": 0.02109084774593933
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.5862068965517241,
+            "acc_stderr,none": 0.03465304488406795
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.69,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.337037037037037,
+            "acc_stderr,none": 0.028820884666253252
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.48344370860927155,
+            "acc_stderr,none": 0.0408024418562897
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5833333333333334,
+            "acc_stderr,none": 0.03362277436608044
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5178571428571429,
+            "acc_stderr,none": 0.04742762361243011
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.386,
+            "acc_stderr,none": 0.02179352921928116,
+            "acc_norm,none": 0.466,
+            "acc_norm_stderr,none": 0.02233126442325838,
+            "alias": "openbookqa"
+        }
+    },
+    "task_info": {
+        "model": "microsoft/Phi-3-mini-4k-instruct",
+        "revision": "main",
+        "private": false,
+        "params": 3.82,
+        "architectures": "Phi3ForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 3.82,
+        "model_size": 7.64,
+        "weight_dtype": "bfloat16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|piqa|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|openbookqa|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715956957.150029,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=microsoft/Phi-3-mini-4k-instruct,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
microsoft/results_2024-05-18-20-01-59_phi-2.json
ADDED
@@ -0,0 +1,576 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": "-1",
+        "start_time": "",
+        "end_time": "",
+        "total_evaluation_time_secondes": "",
+        "model_name": "microsoft/phi-2",
+        "model_sha": "",
+        "model_dtype": "16bit",
+        "model_size": 5.56,
+        "model_params": 2.78,
+        "quant_type": null,
+        "precision": "16bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.7561168113654302,
+            "acc_stderr,none": 0.01206892327890819,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 5.557616371889193,
+            "perplexity_stderr,none": 0.1501396554611075,
+            "acc,none": 0.627595575392975,
+            "acc_stderr,none": 0.006735338475740351,
+            "alias": "lambada_openai"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8330275229357799,
+            "acc_stderr,none": 0.006522959168899894,
+            "alias": "boolq"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5325452214784219,
+            "acc_stderr,none": 0.004017890530869981,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.48459086078639746,
+            "acc_stderr,none": 0.006856193765800774
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.36507936507936506,
+            "acc_stderr,none": 0.04306241259127153
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6727272727272727,
+            "acc_stderr,none": 0.03663974994391243
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.6617647058823529,
+            "acc_stderr,none": 0.03320574612945431
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7426160337552743,
+            "acc_stderr,none": 0.0284588209914603
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.743801652892562,
+            "acc_stderr,none": 0.03984979653302871
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6574074074074074,
+            "acc_stderr,none": 0.04587904741301811
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7055214723926381,
+            "acc_stderr,none": 0.03581165790474082
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6445086705202312,
+            "acc_stderr,none": 0.025770292082977247
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2324022346368715,
+            "acc_stderr,none": 0.014125968754673389
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.5659163987138264,
+            "acc_stderr,none": 0.0281502322445356
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6049382716049383,
+            "acc_stderr,none": 0.027201117666925657
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.40352020860495436,
+            "acc_stderr,none": 0.01253024130119318
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.6666666666666666,
+            "acc_stderr,none": 0.036155076303109344
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.5828773736723527,
+            "acc_stderr,none": 0.008588881438455619
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.51,
+            "acc_stderr,none": 0.05024183937956914
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6339622641509434,
+            "acc_stderr,none": 0.029647813539365235
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5317919075144508,
+            "acc_stderr,none": 0.03804749744364763
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6143497757847534,
+            "acc_stderr,none": 0.03266842214289201
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.6990291262135923,
+            "acc_stderr,none": 0.04541609446503948
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7991452991452992,
+            "acc_stderr,none": 0.02624677294689048
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.56,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.6768837803320562,
+            "acc_stderr,none": 0.016723726512343048
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5947712418300654,
+            "acc_stderr,none": 0.028110928492809075
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.40070921985815605,
+            "acc_stderr,none": 0.029233465745573093
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.41911764705882354,
+            "acc_stderr,none": 0.029972807170464626
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4397590361445783,
+            "acc_stderr,none": 0.03864139923699122
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.6304842378940526,
+            "acc_stderr,none": 0.008517546545668603
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.30701754385964913,
+            "acc_stderr,none": 0.043391383225798615
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7121212121212122,
+            "acc_stderr,none": 0.03225883512300992
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7202072538860104,
+            "acc_stderr,none": 0.03239637046735703
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5538461538461539,
+            "acc_stderr,none": 0.02520357177302833
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.5588235294117647,
+            "acc_stderr,none": 0.0322529423239964
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7321100917431193,
+            "acc_stderr,none": 0.018987462257978652
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6259541984732825,
+            "acc_stderr,none": 0.04243869242230524
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5473856209150327,
+            "acc_stderr,none": 0.02013679091849253
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6636363636363637,
+            "acc_stderr,none": 0.04525393596302506
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6612244897959184,
+            "acc_stderr,none": 0.030299506562154185
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7512437810945274,
+            "acc_stderr,none": 0.030567675938916714
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.74,
+            "acc_stderr,none": 0.0440844002276808
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4589280050745322,
+            "acc_stderr,none": 0.008648120965315137
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.04512608598542127
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.45185185185185184,
+            "acc_stderr,none": 0.04299268905480864
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.5723684210526315,
+            "acc_stderr,none": 0.040260970832965634
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.5763888888888888,
+            "acc_stderr,none": 0.041321250197233685
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.046882617226215034
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709391
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.049236596391733084
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.29411764705882354,
+            "acc_stderr,none": 0.04533838195929774
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.63,
+            "acc_stderr,none": 0.04852365870939099
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.502127659574468,
+            "acc_stderr,none": 0.03268572658667492
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.503448275862069,
+            "acc_stderr,none": 0.04166567577101579
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.3888888888888889,
+            "acc_stderr,none": 0.02510742548113728
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.667741935483871,
+            "acc_stderr,none": 0.02679556084812279
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4482758620689655,
+            "acc_stderr,none": 0.03499113137676744
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252609
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3111111111111111,
+            "acc_stderr,none": 0.028226446749683515
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.36423841059602646,
+            "acc_stderr,none": 0.03929111781242742
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.4444444444444444,
+            "acc_stderr,none": 0.03388857118502325
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.42857142857142855,
+            "acc_stderr,none": 0.04697113923010212
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5583549093806015,
+            "acc_stderr,none": 0.004955681533284329,
+            "acc_norm,none": 0.7378012348137821,
+            "acc_norm_stderr,none": 0.00438931274801212,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7992424242424242,
+            "acc_stderr,none": 0.008219462692991508,
+            "acc_norm,none": 0.781986531986532,
+            "acc_norm_stderr,none": 0.008472459303145413,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.31456548347613217,
+            "acc_stderr,none": 0.016255241993179164,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.4453719698490273,
+            "acc_stderr,none": 0.015127379337232197,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.402,
+            "acc_stderr,none": 0.021948929609938612,
+            "acc_norm,none": 0.514,
+            "acc_norm_stderr,none": 0.022374298166353185,
+            "alias": "openbookqa"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7867247007616975,
+            "acc_stderr,none": 0.009557121225861328,
+            "acc_norm,none": 0.7921653971708379,
+            "acc_norm_stderr,none": 0.009466997964536391,
+            "alias": "piqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5281569965870307,
+            "acc_stderr,none": 0.014588204105102203,
+            "acc_norm,none": 0.5409556313993175,
+            "acc_norm_stderr,none": 0.014562291073601229,
+            "alias": "arc_challenge"
+        }
+    },
+    "task_info": {
+        "model": "microsoft/phi-2",
+        "revision": "main",
+        "private": false,
+        "params": 2.78,
+        "architectures": "PhiForCausalLM",
+        "quant_type": null,
+        "precision": "16bit",
+        "model_params": 2.78,
+        "model_size": 5.56,
+        "weight_dtype": "float16",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "original",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": null,
+    "versions": {
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
|
419 |
+
"harness|boolq|0": 2.0,
|
420 |
+
"harness|mmlu|0": null,
|
421 |
+
"harness|mmlu_humanities|0": null,
|
422 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
423 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
424 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
425 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
426 |
+
"harness|mmlu_international_law|0": 0.0,
|
427 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
428 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
429 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
430 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
431 |
+
"harness|mmlu_philosophy|0": 0.0,
|
432 |
+
"harness|mmlu_prehistory|0": 0.0,
|
433 |
+
"harness|mmlu_professional_law|0": 0.0,
|
434 |
+
"harness|mmlu_world_religions|0": 0.0,
|
435 |
+
"harness|mmlu_other|0": null,
|
436 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
437 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
438 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
439 |
+
"harness|mmlu_global_facts|0": 0.0,
|
440 |
+
"harness|mmlu_human_aging|0": 0.0,
|
441 |
+
"harness|mmlu_management|0": 0.0,
|
442 |
+
"harness|mmlu_marketing|0": 0.0,
|
443 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
444 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
445 |
+
"harness|mmlu_nutrition|0": 0.0,
|
446 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
447 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
448 |
+
"harness|mmlu_virology|0": 0.0,
|
449 |
+
"harness|mmlu_social_sciences|0": null,
|
450 |
+
"harness|mmlu_econometrics|0": 0.0,
|
451 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
452 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
453 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
454 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
455 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
456 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
457 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
458 |
+
"harness|mmlu_public_relations|0": 0.0,
|
459 |
+
"harness|mmlu_security_studies|0": 0.0,
|
460 |
+
"harness|mmlu_sociology|0": 0.0,
|
461 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
462 |
+
"harness|mmlu_stem|0": null,
|
463 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
464 |
+
"harness|mmlu_anatomy|0": 0.0,
|
465 |
+
"harness|mmlu_astronomy|0": 0.0,
|
466 |
+
"harness|mmlu_college_biology|0": 0.0,
|
467 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
468 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
469 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
470 |
+
"harness|mmlu_college_physics|0": 0.0,
|
471 |
+
"harness|mmlu_computer_security|0": 0.0,
|
472 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
473 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
474 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
478 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
479 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
480 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
481 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
482 |
+
"harness|hellaswag|0": 1.0,
|
483 |
+
"harness|arc:easy|0": 1.0,
|
484 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
485 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
486 |
+
"harness|openbookqa|0": 1.0,
|
487 |
+
"harness|piqa|0": 1.0,
|
488 |
+
"harness|arc:challenge|0": 1.0
|
489 |
+
},
|
490 |
+
"n-shot": {
|
491 |
+
"arc_challenge": 0,
|
492 |
+
"arc_easy": 0,
|
493 |
+
"boolq": 0,
|
494 |
+
"hellaswag": 0,
|
495 |
+
"lambada_openai": 0,
|
496 |
+
"mmlu": 0,
|
497 |
+
"mmlu_abstract_algebra": 0,
|
498 |
+
"mmlu_anatomy": 0,
|
499 |
+
"mmlu_astronomy": 0,
|
500 |
+
"mmlu_business_ethics": 0,
|
501 |
+
"mmlu_clinical_knowledge": 0,
|
502 |
+
"mmlu_college_biology": 0,
|
503 |
+
"mmlu_college_chemistry": 0,
|
504 |
+
"mmlu_college_computer_science": 0,
|
505 |
+
"mmlu_college_mathematics": 0,
|
506 |
+
"mmlu_college_medicine": 0,
|
507 |
+
"mmlu_college_physics": 0,
|
508 |
+
"mmlu_computer_security": 0,
|
509 |
+
"mmlu_conceptual_physics": 0,
|
510 |
+
"mmlu_econometrics": 0,
|
511 |
+
"mmlu_electrical_engineering": 0,
|
512 |
+
"mmlu_elementary_mathematics": 0,
|
513 |
+
"mmlu_formal_logic": 0,
|
514 |
+
"mmlu_global_facts": 0,
|
515 |
+
"mmlu_high_school_biology": 0,
|
516 |
+
"mmlu_high_school_chemistry": 0,
|
517 |
+
"mmlu_high_school_computer_science": 0,
|
518 |
+
"mmlu_high_school_european_history": 0,
|
519 |
+
"mmlu_high_school_geography": 0,
|
520 |
+
"mmlu_high_school_government_and_politics": 0,
|
521 |
+
"mmlu_high_school_macroeconomics": 0,
|
522 |
+
"mmlu_high_school_mathematics": 0,
|
523 |
+
"mmlu_high_school_microeconomics": 0,
|
524 |
+
"mmlu_high_school_physics": 0,
|
525 |
+
"mmlu_high_school_psychology": 0,
|
526 |
+
"mmlu_high_school_statistics": 0,
|
527 |
+
"mmlu_high_school_us_history": 0,
|
528 |
+
"mmlu_high_school_world_history": 0,
|
529 |
+
"mmlu_human_aging": 0,
|
530 |
+
"mmlu_human_sexuality": 0,
|
531 |
+
"mmlu_humanities": 0,
|
532 |
+
"mmlu_international_law": 0,
|
533 |
+
"mmlu_jurisprudence": 0,
|
534 |
+
"mmlu_logical_fallacies": 0,
|
535 |
+
"mmlu_machine_learning": 0,
|
536 |
+
"mmlu_management": 0,
|
537 |
+
"mmlu_marketing": 0,
|
538 |
+
"mmlu_medical_genetics": 0,
|
539 |
+
"mmlu_miscellaneous": 0,
|
540 |
+
"mmlu_moral_disputes": 0,
|
541 |
+
"mmlu_moral_scenarios": 0,
|
542 |
+
"mmlu_nutrition": 0,
|
543 |
+
"mmlu_other": 0,
|
544 |
+
"mmlu_philosophy": 0,
|
545 |
+
"mmlu_prehistory": 0,
|
546 |
+
"mmlu_professional_accounting": 0,
|
547 |
+
"mmlu_professional_law": 0,
|
548 |
+
"mmlu_professional_medicine": 0,
|
549 |
+
"mmlu_professional_psychology": 0,
|
550 |
+
"mmlu_public_relations": 0,
|
551 |
+
"mmlu_security_studies": 0,
|
552 |
+
"mmlu_social_sciences": 0,
|
553 |
+
"mmlu_sociology": 0,
|
554 |
+
"mmlu_stem": 0,
|
555 |
+
"mmlu_us_foreign_policy": 0,
|
556 |
+
"mmlu_virology": 0,
|
557 |
+
"mmlu_world_religions": 0,
|
558 |
+
"openbookqa": 0,
|
559 |
+
"piqa": 0,
|
560 |
+
"truthfulqa_mc1": 0,
|
561 |
+
"truthfulqa_mc2": 0,
|
562 |
+
"winogrande": 0
|
563 |
+
},
|
564 |
+
"date": 1716031670.214239,
|
565 |
+
"config": {
|
566 |
+
"model": "hf",
|
567 |
+
"model_args": "pretrained=microsoft/phi-2,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
568 |
+
"batch_size": 4,
|
569 |
+
"batch_sizes": [],
|
570 |
+
"device": "cuda",
|
571 |
+
"use_cache": null,
|
572 |
+
"limit": null,
|
573 |
+
"bootstrap_iters": 100000,
|
574 |
+
"gen_kwargs": null
|
575 |
+
}
|
576 |
+
}
|
microsoft/results_2024-05-19-05-12-46_Phi-3-mini-128k-instruct.json
ADDED
@@ -0,0 +1,576 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": "-1",
        "start_time": "",
        "end_time": "",
        "total_evaluation_time_secondes": "",
        "model_name": "microsoft/Phi-3-mini-128k-instruct",
        "model_sha": "",
        "model_dtype": "16bit",
        "model_size": 7.64,
        "model_params": 3.82,
        "quant_type": null,
        "precision": "16bit"
    },
    "results": {
        "harness|mmlu|0": {
            "acc,none": 0.6214926648625552,
            "acc_stderr,none": 0.0038976987371299155,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.5727948990435706,
            "acc_stderr,none": 0.006888665051090993
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.46825396825396826,
            "acc_stderr,none": 0.04463112720677173
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.7757575757575758,
            "acc_stderr,none": 0.03256866661681102
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.7450980392156863,
            "acc_stderr,none": 0.030587591351604246
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.7890295358649789,
            "acc_stderr,none": 0.02655837250266192
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.7933884297520661,
            "acc_stderr,none": 0.03695980128098824
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.6944444444444444,
            "acc_stderr,none": 0.04453197507374983
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.6993865030674846,
            "acc_stderr,none": 0.03602511318806771
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.653179190751445,
            "acc_stderr,none": 0.02562472399403046
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.3776536312849162,
            "acc_stderr,none": 0.01621414875213663
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.5627009646302251,
            "acc_stderr,none": 0.0281739177617629
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.7407407407407407,
            "acc_stderr,none": 0.02438366553103545
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.49478487614080835,
            "acc_stderr,none": 0.012769541449652545
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.8538011695906432,
            "acc_stderr,none": 0.02709729011807077
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.6749275828773736,
            "acc_stderr,none": 0.008191576163209844
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.65,
            "acc_stderr,none": 0.047937248544110196
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.6792452830188679,
            "acc_stderr,none": 0.028727502957880263
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.5953757225433526,
            "acc_stderr,none": 0.03742461193887249
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.42,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.6367713004484304,
            "acc_stderr,none": 0.032277904428505
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.7184466019417476,
            "acc_stderr,none": 0.04453254836326466
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.8504273504273504,
            "acc_stderr,none": 0.023365051491753715
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.7,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.7790549169859514,
            "acc_stderr,none": 0.014836205167333572
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6470588235294118,
            "acc_stderr,none": 0.027363593284684972
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.5531914893617021,
            "acc_stderr,none": 0.0296582350976669
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.6727941176470589,
            "acc_stderr,none": 0.028501452860396563
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.45180722891566266,
            "acc_stderr,none": 0.03874371556587953
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.7396815079623009,
            "acc_stderr,none": 0.0077691244448688515
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.5175438596491229,
            "acc_stderr,none": 0.04700708033551038
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.7777777777777778,
            "acc_stderr,none": 0.02962022787479048
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.8290155440414507,
            "acc_stderr,none": 0.027171213683164535
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.658974358974359,
            "acc_stderr,none": 0.02403548967633507
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.7605042016806722,
            "acc_stderr,none": 0.027722065493361266
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.8477064220183487,
            "acc_stderr,none": 0.015405084393157069
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.7175572519083969,
            "acc_stderr,none": 0.03948406125768361
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.6748366013071896,
            "acc_stderr,none": 0.018950886770806304
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.5909090909090909,
            "acc_stderr,none": 0.047093069786618966
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.763265306122449,
            "acc_stderr,none": 0.027212835884073132
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.8009950248756219,
            "acc_stderr,none": 0.028231365092758406
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.83,
            "acc_stderr,none": 0.0377525168068637
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.5261655566127498,
            "acc_stderr,none": 0.008544478630502603
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.35,
            "acc_stderr,none": 0.0479372485441102
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.6370370370370371,
            "acc_stderr,none": 0.04153948404742398
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.7236842105263158,
            "acc_stderr,none": 0.03639057569952929
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.7638888888888888,
            "acc_stderr,none": 0.03551446610810826
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.41,
            "acc_stderr,none": 0.04943110704237102
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.47,
            "acc_stderr,none": 0.05016135580465919
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.29,
            "acc_stderr,none": 0.045604802157206845
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.37254901960784315,
            "acc_stderr,none": 0.04810840148082636
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.66,
            "acc_stderr,none": 0.04760952285695237
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.5957446808510638,
            "acc_stderr,none": 0.03208115750788684
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.4896551724137931,
            "acc_stderr,none": 0.041657747757287644
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.4656084656084656,
            "acc_stderr,none": 0.025690321762493848
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.7419354838709677,
            "acc_stderr,none": 0.024892469172462846
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.5369458128078818,
            "acc_stderr,none": 0.035083705204426656
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.65,
            "acc_stderr,none": 0.0479372485441102
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.3037037037037037,
            "acc_stderr,none": 0.02803792996911499
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.41721854304635764,
            "acc_stderr,none": 0.040261414976346104
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.5046296296296297,
            "acc_stderr,none": 0.03409825519163572
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.4642857142857143,
            "acc_stderr,none": 0.04733667890053756
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.5367793687915322,
            "acc_stderr,none": 0.015456637030352596,
            "alias": "truthfulqa_mc2"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.8118686868686869,
            "acc_stderr,none": 0.008019395492398136,
            "acc_norm,none": 0.7760942760942761,
            "acc_norm_stderr,none": 0.008553779114531769,
            "alias": "arc_easy"
        },
        "harness|piqa|0": {
            "acc,none": 0.7883569096844396,
            "acc_stderr,none": 0.009530351270479402,
            "acc_norm,none": 0.7986942328618063,
            "acc_norm_stderr,none": 0.009355431098990447,
            "alias": "piqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.3574051407588739,
            "acc_stderr,none": 0.016776599676729405,
            "alias": "truthfulqa_mc1"
        },
        "harness|boolq|0": {
            "acc,none": 0.8571865443425076,
            "acc_stderr,none": 0.006119483613869241,
            "alias": "boolq"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.5418088737201365,
            "acc_stderr,none": 0.014560220308714698,
            "acc_norm,none": 0.552901023890785,
            "acc_norm_stderr,none": 0.014529380160526845,
            "alias": "arc_challenge"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5978888667596096,
            "acc_stderr,none": 0.004893220635011801,
            "acc_norm,none": 0.7822146982672774,
            "acc_norm_stderr,none": 0.004118971487050385,
            "alias": "hellaswag"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 4.591695940331181,
            "perplexity_stderr,none": 0.10971999697563199,
            "acc,none": 0.6656316708713371,
            "acc_stderr,none": 0.006572666992809166,
            "alias": "lambada_openai"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.39,
            "acc_stderr,none": 0.021834685869369215,
            "acc_norm,none": 0.454,
            "acc_norm_stderr,none": 0.02228814759117695,
            "alias": "openbookqa"
        },
        "harness|winogrande|0": {
            "acc,none": 0.7324388318863457,
            "acc_stderr,none": 0.012441718456893009,
            "alias": "winogrande"
        }
    },
    "task_info": {
        "model": "microsoft/Phi-3-mini-128k-instruct",
        "revision": "main",
        "private": false,
        "params": 3.82,
        "architectures": "Phi3ForCausalLM",
        "quant_type": null,
        "precision": "16bit",
        "model_params": 3.82,
        "model_size": 7.64,
        "weight_dtype": "bfloat16",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-27T08:04:58Z",
        "model_type": "original",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": null,
    "versions": {
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|arc:easy|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|winogrande|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1716064243.9578123,
    "config": {
        "model": "hf",
        "model_args": "pretrained=microsoft/Phi-3-mini-128k-instruct,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
mistralai/results_2024-05-18-11-15-11_Mistral-7B-Instruct-v0.2.json
ADDED
@@ -0,0 +1,576 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": "-1",
|
9 |
+
"start_time": "",
|
10 |
+
"end_time": "",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "mistralai/Mistral-7B-Instruct-v0.2",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "16bit",
|
15 |
+
"model_size": "14.48",
|
16 |
+
"model_params": "7.24",
|
17 |
+
"quant_type": null,
|
18 |
+
"precision": "16bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|openbookqa|0": {
|
22 |
+
"acc,none": 0.352,
|
23 |
+
"acc_stderr,none": 0.02138004238594604,
|
24 |
+
"acc_norm,none": 0.448,
|
25 |
+
"acc_norm_stderr,none": 0.02226169729227014,
|
26 |
+
"alias": "openbookqa"
|
27 |
+
},
|
28 |
+
"harness|lambada:openai|0": {
|
29 |
+
"perplexity,none": 3.4161390176351083,
|
30 |
+
"perplexity_stderr,none": 0.07283023558231332,
|
31 |
+
"acc,none": 0.7155055307587813,
|
32 |
+
"acc_stderr,none": 0.006285726556944496,
|
33 |
+
"alias": "lambada_openai"
|
34 |
+
},
|
35 |
+
"harness|arc:challenge|0": {
|
36 |
+
"acc,none": 0.5418088737201365,
|
37 |
+
"acc_stderr,none": 0.014560220308714693,
|
38 |
+
"acc_norm,none": 0.560580204778157,
|
39 |
+
"acc_norm_stderr,none": 0.014503747823580127,
|
40 |
+
"alias": "arc_challenge"
|
41 |
+
},
|
42 |
+
"harness|truthfulqa:mc1|0": {
|
43 |
+
"acc,none": 0.5250917992656059,
|
44 |
+
"acc_stderr,none": 0.017481446804104003,
|
45 |
+
"alias": "truthfulqa_mc1"
|
46 |
+
},
|
47 |
+
"harness|truthfulqa:mc2|0": {
|
48 |
+
"acc,none": 0.668245295463442,
|
49 |
+
"acc_stderr,none": 0.015246864187140097,
|
50 |
+
"alias": "truthfulqa_mc2"
|
51 |
+
},
|
52 |
+
"harness|arc:easy|0": {
|
53 |
+
"acc,none": 0.8135521885521886,
|
54 |
+
"acc_stderr,none": 0.007991706887152653,
|
55 |
+
"acc_norm,none": 0.7676767676767676,
|
56 |
+
"acc_norm_stderr,none": 0.008665701248293791,
|
57 |
+
"alias": "arc_easy"
|
58 |
+
},
|
59 |
+
"harness|hellaswag|0": {
|
60 |
+
"acc,none": 0.6602270464050985,
|
61 |
+
"acc_stderr,none": 0.00472664053256203,
|
62 |
+
"acc_norm,none": 0.8361880103565027,
|
63 |
+
"acc_norm_stderr,none": 0.0036934848941792995,
|
64 |
+
"alias": "hellaswag"
|
65 |
+
},
|
66 |
+
"harness|mmlu|0": {
|
67 |
+
"acc,none": 0.5877367896311066,
|
68 |
+
"acc_stderr,none": 0.0039444278947174125,
|
69 |
+
"alias": "mmlu"
|
70 |
+
},
|
71 |
+
"harness|mmlu_humanities|0": {
|
72 |
+
"alias": " - humanities",
|
73 |
+
"acc,none": 0.5404888416578109,
|
74 |
+
"acc_stderr,none": 0.006846588098607813
|
75 |
+
},
|
76 |
+
"harness|mmlu_formal_logic|0": {
|
77 |
+
"alias": " - formal_logic",
|
78 |
+
"acc,none": 0.36507936507936506,
|
79 |
+
"acc_stderr,none": 0.043062412591271526
|
80 |
+
},
|
81 |
+
"harness|mmlu_high_school_european_history|0": {
|
82 |
+
"alias": " - high_school_european_history",
|
83 |
+
"acc,none": 0.7393939393939394,
|
84 |
+
"acc_stderr,none": 0.034277431758165236
|
85 |
+
},
|
86 |
+
"harness|mmlu_high_school_us_history|0": {
|
87 |
+
"alias": " - high_school_us_history",
|
88 |
+
"acc,none": 0.7794117647058824,
|
89 |
+
"acc_stderr,none": 0.0291022543896741
|
90 |
+
},
|
91 |
+
"harness|mmlu_high_school_world_history|0": {
|
92 |
+
"alias": " - high_school_world_history",
|
93 |
+
"acc,none": 0.7805907172995781,
|
94 |
+
"acc_stderr,none": 0.026939106581553945
|
95 |
+
},
|
96 |
+
"harness|mmlu_international_law|0": {
|
97 |
+
"alias": " - international_law",
|
98 |
+
"acc,none": 0.743801652892562,
|
99 |
+
"acc_stderr,none": 0.03984979653302872
|
100 |
+
},
|
101 |
+
"harness|mmlu_jurisprudence|0": {
|
102 |
+
"alias": " - jurisprudence",
|
103 |
+
"acc,none": 0.7129629629629629,
|
104 |
+
"acc_stderr,none": 0.043733130409147614
|
105 |
+
},
|
106 |
+
"harness|mmlu_logical_fallacies|0": {
|
107 |
+
"alias": " - logical_fallacies",
|
108 |
+
"acc,none": 0.754601226993865,
|
109 |
+
"acc_stderr,none": 0.03380939813943354
|
110 |
+
},
|
111 |
+
"harness|mmlu_moral_disputes|0": {
|
112 |
+
"alias": " - moral_disputes",
|
113 |
+
"acc,none": 0.6560693641618497,
|
114 |
+
"acc_stderr,none": 0.025574123786546648
|
115 |
+
},
|
116 |
+
"harness|mmlu_moral_scenarios|0": {
|
117 |
+
"alias": " - moral_scenarios",
|
118 |
+
"acc,none": 0.3575418994413408,
|
119 |
+
"acc_stderr,none": 0.01602939447489489
|
120 |
+
},
|
121 |
+
"harness|mmlu_philosophy|0": {
|
122 |
+
"alias": " - philosophy",
|
123 |
+
"acc,none": 0.6463022508038585,
|
124 |
+
"acc_stderr,none": 0.02715520810320088
|
125 |
+
},
|
126 |
+
"harness|mmlu_prehistory|0": {
|
127 |
+
"alias": " - prehistory",
|
128 |
+
"acc,none": 0.6820987654320988,
|
129 |
+
"acc_stderr,none": 0.0259100635282409
|
130 |
+
},
|
131 |
+
"harness|mmlu_professional_law|0": {
|
132 |
+
"alias": " - professional_law",
|
133 |
+
"acc,none": 0.41264667535853977,
|
134 |
+
"acc_stderr,none": 0.012573836633799016
|
135 |
+
},
|
136 |
+
"harness|mmlu_world_religions|0": {
|
137 |
+
"alias": " - world_religions",
|
138 |
+
"acc,none": 0.8128654970760234,
|
139 |
+
"acc_stderr,none": 0.029913127232368032
|
140 |
+
},
|
141 |
+
"harness|mmlu_other|0": {
|
142 |
+
"alias": " - other",
|
143 |
+
"acc,none": 0.6620534277438043,
|
144 |
+
"acc_stderr,none": 0.008157377084913042
|
145 |
+
},
|
146 |
+
"harness|mmlu_business_ethics|0": {
|
147 |
+
"alias": " - business_ethics",
|
148 |
+
"acc,none": 0.59,
|
149 |
+
"acc_stderr,none": 0.049431107042371025
|
150 |
+
},
|
151 |
+
"harness|mmlu_clinical_knowledge|0": {
|
152 |
+
"alias": " - clinical_knowledge",
|
153 |
+
"acc,none": 0.6528301886792452,
|
154 |
+
"acc_stderr,none": 0.029300101705549645
|
155 |
+
},
|
156 |
+
"harness|mmlu_college_medicine|0": {
|
157 |
+
"alias": " - college_medicine",
|
158 |
+
"acc,none": 0.5606936416184971,
|
159 |
+
"acc_stderr,none": 0.03784271932887467
|
160 |
+
},
|
161 |
+
"harness|mmlu_global_facts|0": {
|
162 |
+
"alias": " - global_facts",
|
163 |
+
"acc,none": 0.36,
|
164 |
+
"acc_stderr,none": 0.04824181513244218
|
165 |
+
},
|
166 |
+
"harness|mmlu_human_aging|0": {
|
167 |
+
"alias": " - human_aging",
|
168 |
+
"acc,none": 0.6233183856502242,
|
169 |
+
"acc_stderr,none": 0.032521134899291884
|
170 |
+
},
|
171 |
+
"harness|mmlu_management|0": {
|
172 |
+
"alias": " - management",
|
173 |
+
"acc,none": 0.7281553398058253,
|
174 |
+
"acc_stderr,none": 0.044052680241409216
|
175 |
+
},
|
176 |
+
"harness|mmlu_marketing|0": {
|
177 |
+
"alias": " - marketing",
|
178 |
+
"acc,none": 0.8717948717948718,
|
179 |
+
"acc_stderr,none": 0.021901905115073332
|
180 |
+
},
|
181 |
+
"harness|mmlu_medical_genetics|0": {
|
182 |
+
"alias": " - medical_genetics",
|
183 |
+
"acc,none": 0.64,
|
184 |
+
"acc_stderr,none": 0.04824181513244218
|
185 |
+
},
|
186 |
+
"harness|mmlu_miscellaneous|0": {
|
187 |
+
"alias": " - miscellaneous",
|
188 |
+
"acc,none": 0.7943805874840357,
|
189 |
+
"acc_stderr,none": 0.014452500456785828
|
190 |
+
},
|
191 |
+
"harness|mmlu_nutrition|0": {
|
192 |
+
"alias": " - nutrition",
|
193 |
+
"acc,none": 0.673202614379085,
|
194 |
+
"acc_stderr,none": 0.026857294663281416
|
195 |
+
},
|
196 |
+
"harness|mmlu_professional_accounting|0": {
|
197 |
+
"alias": " - professional_accounting",
|
198 |
+
"acc,none": 0.45390070921985815,
|
199 |
+
"acc_stderr,none": 0.029700453247291467
|
200 |
+
},
|
201 |
+
"harness|mmlu_professional_medicine|0": {
|
202 |
+
"alias": " - professional_medicine",
|
203 |
+
"acc,none": 0.6544117647058824,
|
204 |
+
"acc_stderr,none": 0.028888193103988644
|
205 |
+
},
|
206 |
+
"harness|mmlu_virology|0": {
|
207 |
+
"alias": " - virology",
|
208 |
+
"acc,none": 0.4578313253012048,
|
209 |
+
"acc_stderr,none": 0.0387862677100236
|
210 |
+
},
|
211 |
+
"harness|mmlu_social_sciences|0": {
|
212 |
+
"alias": " - social_sciences",
|
213 |
+
"acc,none": 0.6789080272993175,
|
214 |
+
"acc_stderr,none": 0.008204770587034223
|
215 |
+
},
|
216 |
+
"harness|mmlu_econometrics|0": {
|
217 |
+
"alias": " - econometrics",
|
218 |
+
"acc,none": 0.45614035087719296,
|
219 |
+
"acc_stderr,none": 0.04685473041907789
|
220 |
+
},
|
221 |
+
"harness|mmlu_high_school_geography|0": {
|
222 |
+
"alias": " - high_school_geography",
|
223 |
+
"acc,none": 0.7373737373737373,
|
224 |
+
"acc_stderr,none": 0.03135305009533085
|
225 |
+
},
|
226 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
227 |
+
"alias": " - high_school_government_and_politics",
|
228 |
+
"acc,none": 0.8082901554404145,
|
229 |
+
"acc_stderr,none": 0.02840895362624527
|
230 |
+
},
|
231 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
232 |
+
"alias": " - high_school_macroeconomics",
|
233 |
+
"acc,none": 0.5564102564102564,
|
234 |
+
"acc_stderr,none": 0.0251891498947642
|
235 |
+
},
|
236 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
237 |
+
"alias": " - high_school_microeconomics",
|
238 |
+
"acc,none": 0.6302521008403361,
|
239 |
+
"acc_stderr,none": 0.03135709599613591
|
240 |
+
},
|
241 |
+
"harness|mmlu_high_school_psychology|0": {
|
242 |
+
"alias": " - high_school_psychology",
|
243 |
+
"acc,none": 0.7853211009174312,
|
244 |
+
"acc_stderr,none": 0.01760430414925649
|
245 |
+
},
|
246 |
+
"harness|mmlu_human_sexuality|0": {
|
247 |
+
"alias": " - human_sexuality",
|
248 |
+
"acc,none": 0.6793893129770993,
|
249 |
+
"acc_stderr,none": 0.04093329229834278
|
250 |
+
},
|
251 |
+
"harness|mmlu_professional_psychology|0": {
|
252 |
+
"alias": " - professional_psychology",
|
253 |
+
"acc,none": 0.5849673202614379,
|
254 |
+
"acc_stderr,none": 0.01993362777685742
|
255 |
+
},
|
256 |
+
"harness|mmlu_public_relations|0": {
|
257 |
+
"alias": " - public_relations",
|
258 |
+
"acc,none": 0.6454545454545455,
|
259 |
+
"acc_stderr,none": 0.04582004841505417
|
260 |
+
},
|
261 |
+
"harness|mmlu_security_studies|0": {
|
262 |
+
"alias": " - security_studies",
|
263 |
+
"acc,none": 0.6857142857142857,
|
264 |
+
"acc_stderr,none": 0.02971932942241748
|
265 |
+
},
|
266 |
+
"harness|mmlu_sociology|0": {
|
267 |
+
"alias": " - sociology",
|
268 |
+
"acc,none": 0.8407960199004975,
|
269 |
+
"acc_stderr,none": 0.02587064676616913
|
270 |
+
},
|
271 |
+
"harness|mmlu_us_foreign_policy|0": {
|
272 |
+
"alias": " - us_foreign_policy",
|
273 |
+
"acc,none": 0.85,
|
274 |
+
"acc_stderr,none": 0.03588702812826369
|
275 |
+
},
|
276 |
+
"harness|mmlu_stem|0": {
|
277 |
+
"alias": " - stem",
|
278 |
+
"acc,none": 0.49603552172534093,
|
279 |
+
"acc_stderr,none": 0.008687872678546427
|
280 |
+
},
|
281 |
+
"harness|mmlu_abstract_algebra|0": {
|
282 |
+
"alias": " - abstract_algebra",
|
283 |
+
"acc,none": 0.32,
|
284 |
+
"acc_stderr,none": 0.046882617226215034
|
285 |
+
},
|
286 |
+
"harness|mmlu_anatomy|0": {
|
287 |
+
"alias": " - anatomy",
|
288 |
+
"acc,none": 0.5703703703703704,
|
289 |
+
"acc_stderr,none": 0.04276349494376599
|
290 |
+
},
|
291 |
+
"harness|mmlu_astronomy|0": {
|
292 |
+
"alias": " - astronomy",
|
293 |
+
"acc,none": 0.6381578947368421,
|
294 |
+
"acc_stderr,none": 0.03910525752849724
|
295 |
+
},
|
296 |
+
"harness|mmlu_college_biology|0": {
|
297 |
+
"alias": " - college_biology",
|
298 |
+
"acc,none": 0.6458333333333334,
|
299 |
+
"acc_stderr,none": 0.039994111357535424
|
300 |
+
},
|
301 |
+
"harness|mmlu_college_chemistry|0": {
|
302 |
+
"alias": " - college_chemistry",
|
303 |
+
"acc,none": 0.42,
|
304 |
+
"acc_stderr,none": 0.049604496374885836
|
305 |
+
},
|
306 |
+
"harness|mmlu_college_computer_science|0": {
|
307 |
+
"alias": " - college_computer_science",
|
308 |
+
"acc,none": 0.54,
|
309 |
+
"acc_stderr,none": 0.05009082659620332
|
310 |
+
},
|
311 |
+
"harness|mmlu_college_mathematics|0": {
|
312 |
+
"alias": " - college_mathematics",
|
313 |
+
"acc,none": 0.34,
|
314 |
+
"acc_stderr,none": 0.04760952285695235
|
315 |
+
},
|
316 |
+
"harness|mmlu_college_physics|0": {
|
317 |
+
"alias": " - college_physics",
|
318 |
+
"acc,none": 0.37254901960784315,
|
319 |
+
"acc_stderr,none": 0.04810840148082635
|
320 |
+
},
|
321 |
+
"harness|mmlu_computer_security|0": {
|
322 |
+
"alias": " - computer_security",
|
323 |
+
"acc,none": 0.67,
|
324 |
+
"acc_stderr,none": 0.04725815626252609
|
325 |
+
},
|
326 |
+
"harness|mmlu_conceptual_physics|0": {
|
327 |
+
"alias": " - conceptual_physics",
|
328 |
+
"acc,none": 0.4851063829787234,
|
329 |
+
"acc_stderr,none": 0.032671518489247764
|
330 |
+
},
|
331 |
+
"harness|mmlu_electrical_engineering|0": {
|
332 |
+
"alias": " - electrical_engineering",
|
333 |
+
"acc,none": 0.593103448275862,
|
334 |
+
"acc_stderr,none": 0.04093793981266236
|
335 |
+
},
|
336 |
+
"harness|mmlu_elementary_mathematics|0": {
|
337 |
+
"alias": " - elementary_mathematics",
|
338 |
+
"acc,none": 0.42328042328042326,
|
339 |
+
"acc_stderr,none": 0.025446365634406796
|
340 |
+
},
|
341 |
+
"harness|mmlu_high_school_biology|0": {
|
342 |
+
"alias": " - high_school_biology",
|
343 |
+
"acc,none": 0.6806451612903226,
|
344 |
+
"acc_stderr,none": 0.02652270967466777
|
345 |
+
},
|
346 |
+
"harness|mmlu_high_school_chemistry|0": {
|
347 |
+
"alias": " - high_school_chemistry",
|
348 |
+
"acc,none": 0.47783251231527096,
|
349 |
+
"acc_stderr,none": 0.03514528562175007
|
350 |
+
},
|
351 |
+
"harness|mmlu_high_school_computer_science|0": {
|
352 |
+
"alias": " - high_school_computer_science",
|
353 |
+
"acc,none": 0.61,
|
354 |
+
"acc_stderr,none": 0.04902071300001974
|
355 |
+
},
|
356 |
+
"harness|mmlu_high_school_mathematics|0": {
|
357 |
+
"alias": " - high_school_mathematics",
|
358 |
+
"acc,none": 0.34814814814814815,
|
359 |
+
"acc_stderr,none": 0.029045600290616255
|
360 |
+
},
|
361 |
+
"harness|mmlu_high_school_physics|0": {
|
362 |
+
"alias": " - high_school_physics",
|
363 |
+
"acc,none": 0.3576158940397351,
|
364 |
+
"acc_stderr,none": 0.03913453431177258
|
365 |
+
},
|
366 |
+
"harness|mmlu_high_school_statistics|0": {
|
367 |
+
"alias": " - high_school_statistics",
|
368 |
+
"acc,none": 0.46296296296296297,
|
369 |
+
"acc_stderr,none": 0.03400603625538272
|
370 |
+
},
|
371 |
+
"harness|mmlu_machine_learning|0": {
|
372 |
+
"alias": " - machine_learning",
|
373 |
+
"acc,none": 0.4732142857142857,
|
374 |
+
"acc_stderr,none": 0.047389751192741546
|
375 |
+
},
|
376 |
+
"harness|boolq|0": {
|
377 |
+
"acc,none": 0.8529051987767584,
|
378 |
+
"acc_stderr,none": 0.00619500387506207,
|
379 |
+
"alias": "boolq"
|
380 |
+
},
|
381 |
+
"harness|winogrande|0": {
|
382 |
+
"acc,none": 0.7411207576953434,
|
383 |
+
"acc_stderr,none": 0.01231051581099338,
|
384 |
+
"alias": "winogrande"
|
385 |
+
},
|
386 |
+
"harness|piqa|0": {
|
387 |
+
"acc,none": 0.8014145810663765,
|
388 |
+
"acc_stderr,none": 0.009307814521717883,
|
389 |
+
"acc_norm,none": 0.8068552774755169,
|
390 |
+
"acc_norm_stderr,none": 0.009210530962579804,
|
391 |
+
"alias": "piqa"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "mistralai/Mistral-7B-Instruct-v0.2",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 7.24,
|
399 |
+
"architectures": "MistralForCausalLM",
|
400 |
+
"quant_type": null,
|
401 |
+
"precision": "16bit",
|
402 |
+
"model_params": 7.24,
|
403 |
+
"model_size": 14.48,
|
404 |
+
"weight_dtype": "bfloat16",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-27T08:04:58Z",
|
410 |
+
"model_type": "original",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": null,
|
416 |
+
"versions": {
|
417 |
+
"harness|openbookqa|0": 1.0,
|
418 |
+
"harness|lambada:openai|0": 1.0,
|
419 |
+
"harness|arc:challenge|0": 1.0,
|
420 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
421 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
422 |
+
"harness|arc:easy|0": 1.0,
|
423 |
+
"harness|hellaswag|0": 1.0,
|
424 |
+
"harness|mmlu|0": null,
|
425 |
+
"harness|mmlu_humanities|0": null,
|
426 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
427 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
428 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
429 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
430 |
+
"harness|mmlu_international_law|0": 0.0,
|
431 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
432 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
433 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
434 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
435 |
+
"harness|mmlu_philosophy|0": 0.0,
|
436 |
+
"harness|mmlu_prehistory|0": 0.0,
|
437 |
+
"harness|mmlu_professional_law|0": 0.0,
|
438 |
+
"harness|mmlu_world_religions|0": 0.0,
|
439 |
+
"harness|mmlu_other|0": null,
|
440 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
441 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
442 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
443 |
+
"harness|mmlu_global_facts|0": 0.0,
|
444 |
+
"harness|mmlu_human_aging|0": 0.0,
|
445 |
+
"harness|mmlu_management|0": 0.0,
|
446 |
+
"harness|mmlu_marketing|0": 0.0,
|
447 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
448 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
449 |
+
"harness|mmlu_nutrition|0": 0.0,
|
450 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
451 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
452 |
+
"harness|mmlu_virology|0": 0.0,
|
453 |
+
"harness|mmlu_social_sciences|0": null,
|
454 |
+
"harness|mmlu_econometrics|0": 0.0,
|
455 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
456 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
457 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
458 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
459 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
460 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
461 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
462 |
+
"harness|mmlu_public_relations|0": 0.0,
|
463 |
+
"harness|mmlu_security_studies|0": 0.0,
|
464 |
+
"harness|mmlu_sociology|0": 0.0,
|
465 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
466 |
+
"harness|mmlu_stem|0": null,
|
467 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
468 |
+
"harness|mmlu_anatomy|0": 0.0,
|
469 |
+
"harness|mmlu_astronomy|0": 0.0,
|
470 |
+
"harness|mmlu_college_biology|0": 0.0,
|
471 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
472 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
473 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
474 |
+
"harness|mmlu_college_physics|0": 0.0,
|
475 |
+
"harness|mmlu_computer_security|0": 0.0,
|
476 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
477 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
478 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
479 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
480 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
481 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
482 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
483 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
484 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
485 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
486 |
+
"harness|boolq|0": 2.0,
|
487 |
+
"harness|winogrande|0": 1.0,
|
488 |
+
"harness|piqa|0": 1.0
|
489 |
+
},
|
490 |
+
"n-shot": {
|
491 |
+
"arc_challenge": 0,
|
492 |
+
"arc_easy": 0,
|
493 |
+
"boolq": 0,
|
494 |
+
"hellaswag": 0,
|
495 |
+
"lambada_openai": 0,
|
496 |
+
"mmlu": 0,
|
497 |
+
"mmlu_abstract_algebra": 0,
|
498 |
+
"mmlu_anatomy": 0,
|
499 |
+
"mmlu_astronomy": 0,
|
500 |
+
"mmlu_business_ethics": 0,
|
501 |
+
"mmlu_clinical_knowledge": 0,
|
502 |
+
"mmlu_college_biology": 0,
|
503 |
+
"mmlu_college_chemistry": 0,
|
504 |
+
"mmlu_college_computer_science": 0,
|
505 |
+
"mmlu_college_mathematics": 0,
|
506 |
+
"mmlu_college_medicine": 0,
|
507 |
+
"mmlu_college_physics": 0,
|
508 |
+
"mmlu_computer_security": 0,
|
509 |
+
"mmlu_conceptual_physics": 0,
|
510 |
+
"mmlu_econometrics": 0,
|
511 |
+
"mmlu_electrical_engineering": 0,
|
512 |
+
"mmlu_elementary_mathematics": 0,
|
513 |
+
"mmlu_formal_logic": 0,
|
514 |
+
"mmlu_global_facts": 0,
|
515 |
+
"mmlu_high_school_biology": 0,
|
516 |
+
"mmlu_high_school_chemistry": 0,
|
517 |
+
"mmlu_high_school_computer_science": 0,
|
518 |
+
"mmlu_high_school_european_history": 0,
|
519 |
+
"mmlu_high_school_geography": 0,
|
520 |
+
"mmlu_high_school_government_and_politics": 0,
|
521 |
+
"mmlu_high_school_macroeconomics": 0,
|
522 |
+
"mmlu_high_school_mathematics": 0,
|
523 |
+
"mmlu_high_school_microeconomics": 0,
|
524 |
+
"mmlu_high_school_physics": 0,
|
525 |
+
"mmlu_high_school_psychology": 0,
|
526 |
+
"mmlu_high_school_statistics": 0,
|
527 |
+
"mmlu_high_school_us_history": 0,
|
528 |
+
"mmlu_high_school_world_history": 0,
|
529 |
+
"mmlu_human_aging": 0,
|
530 |
+
"mmlu_human_sexuality": 0,
|
531 |
+
"mmlu_humanities": 0,
|
532 |
+
"mmlu_international_law": 0,
|
533 |
+
"mmlu_jurisprudence": 0,
|
534 |
+
"mmlu_logical_fallacies": 0,
|
535 |
+
"mmlu_machine_learning": 0,
|
536 |
+
"mmlu_management": 0,
|
537 |
+
"mmlu_marketing": 0,
|
538 |
+
"mmlu_medical_genetics": 0,
|
539 |
+
"mmlu_miscellaneous": 0,
|
540 |
+
"mmlu_moral_disputes": 0,
|
541 |
+
"mmlu_moral_scenarios": 0,
|
542 |
+
"mmlu_nutrition": 0,
|
543 |
+
"mmlu_other": 0,
|
544 |
+
"mmlu_philosophy": 0,
|
545 |
+
"mmlu_prehistory": 0,
|
546 |
+
"mmlu_professional_accounting": 0,
|
547 |
+
"mmlu_professional_law": 0,
|
548 |
+
"mmlu_professional_medicine": 0,
|
549 |
+
"mmlu_professional_psychology": 0,
|
550 |
+
"mmlu_public_relations": 0,
|
551 |
+
"mmlu_security_studies": 0,
|
552 |
+
"mmlu_social_sciences": 0,
|
553 |
+
"mmlu_sociology": 0,
|
554 |
+
"mmlu_stem": 0,
|
555 |
+
"mmlu_us_foreign_policy": 0,
|
556 |
+
"mmlu_virology": 0,
|
557 |
+
"mmlu_world_religions": 0,
|
558 |
+
"openbookqa": 0,
|
559 |
+
"piqa": 0,
|
560 |
+
"truthfulqa_mc1": 0,
|
561 |
+
"truthfulqa_mc2": 0,
|
562 |
+
"winogrande": 0
|
563 |
+
},
|
564 |
+
"date": 1716000361.507838,
|
565 |
+
"config": {
|
566 |
+
"model": "hf",
|
567 |
+
"model_args": "pretrained=Mistral-7B-Instruct-v0.2,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
568 |
+
"batch_size": 4,
|
569 |
+
"batch_sizes": [],
|
570 |
+
"device": "cuda",
|
571 |
+
"use_cache": null,
|
572 |
+
"limit": null,
|
573 |
+
"bootstrap_iters": 100000,
|
574 |
+
"gen_kwargs": null
|
575 |
+
}
|
576 |
+
}
|
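Note: every results file in this commit shares the same schema: a "config_general" header, a "results" map keyed by "harness|<task>|<n_shot>", followed by "task_info", "versions", "n-shot", and the run "config". A minimal sketch of reading the headline metrics out of one report (the file path is illustrative; only the Python standard library is used):

import json

# Hypothetical path: any of the results_*.json files added in this commit.
path = "tiiuae/results_2024-05-19-06-56-28_falcon-7b.json"

with open(path) as f:
    report = json.load(f)

# Top-level and per-subject scores share the "acc,none" key convention;
# entries without an accuracy (e.g. perplexity-only fields) are skipped.
for task, metrics in report["results"].items():
    acc = metrics.get("acc,none")
    if acc is not None:
        stderr = metrics.get("acc_stderr,none", 0.0)
        print(f"{task}: {acc:.4f} (+/- {stderr:.4f})")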
tiiuae/results_2024-05-19-06-56-28_falcon-7b.json
ADDED
@@ -0,0 +1,576 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": "-1",
+    "start_time": "",
+    "end_time": "",
+    "total_evaluation_time_secondes": "",
+    "model_name": "tiiuae/falcon-7b",
+    "model_sha": "",
+    "model_dtype": "16bit",
+    "model_size": 14.0,
+    "model_params": 7.0,
+    "quant_type": null,
+    "precision": "16bit"
+  },
+  "results": {
+    "harness|winogrande|0": {
+      "acc,none": 0.6732438831886346,
+      "acc_stderr,none": 0.013181997302131375,
+      "alias": "winogrande"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 3.3575759014645787,
+      "perplexity_stderr,none": 0.0646188814987996,
+      "acc,none": 0.746749466330293,
+      "acc_stderr,none": 0.006058634002437441,
+      "alias": "lambada_openai"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.34262461461800586,
+      "acc_stderr,none": 0.013275260285960447,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.40187713310580203,
+      "acc_stderr,none": 0.014327268614578283,
+      "acc_norm,none": 0.4325938566552901,
+      "acc_norm_stderr,none": 0.014478005694182528,
+      "alias": "arc_challenge"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5774746066520613,
+      "acc_stderr,none": 0.004929517011508242,
+      "acc_norm,none": 0.7633937462656841,
+      "acc_norm_stderr,none": 0.004241299341050755,
+      "alias": "hellaswag"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.736085626911315,
+      "acc_stderr,none": 0.007708825768430382,
+      "alias": "boolq"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.306,
+      "acc_stderr,none": 0.0206295699983454,
+      "acc_norm,none": 0.444,
+      "acc_norm_stderr,none": 0.02224224437573102,
+      "alias": "openbookqa"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.22276621787025705,
+      "acc_stderr,none": 0.014566506961396743,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.7462121212121212,
+      "acc_stderr,none": 0.008929657065808285,
+      "acc_norm,none": 0.7095959595959596,
+      "acc_norm_stderr,none": 0.009314833302936285,
+      "alias": "arc_easy"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.25523429710867396,
+      "acc_stderr,none": 0.0036745618171365275,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.26439957492029753,
+      "acc_stderr,none": 0.006427037273685427
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.2777777777777778,
+      "acc_stderr,none": 0.04006168083848876
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.23636363636363636,
+      "acc_stderr,none": 0.033175059300091805
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.2647058823529412,
+      "acc_stderr,none": 0.030964517926923382
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.270042194092827,
+      "acc_stderr,none": 0.028900721906293426
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.256198347107438,
+      "acc_stderr,none": 0.03984979653302871
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.3055555555555556,
+      "acc_stderr,none": 0.04453197507374983
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.2085889570552147,
+      "acc_stderr,none": 0.031921934489347235
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.3236994219653179,
+      "acc_stderr,none": 0.025190181327608405
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.23798882681564246,
+      "acc_stderr,none": 0.014242630070574885
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.2797427652733119,
+      "acc_stderr,none": 0.025494259350694902
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.2839506172839506,
+      "acc_stderr,none": 0.02508947852376513
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.25945241199478486,
+      "acc_stderr,none": 0.0111952620763503
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.30409356725146197,
+      "acc_stderr,none": 0.03528211258245231
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.2603797875764403,
+      "acc_stderr,none": 0.007863195809752751
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.19,
+      "acc_stderr,none": 0.03942772444036622
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.24150943396226415,
+      "acc_stderr,none": 0.02634148037111835
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.2832369942196532,
+      "acc_stderr,none": 0.034355680560478746
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.31,
+      "acc_stderr,none": 0.04648231987117316
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.3094170403587444,
+      "acc_stderr,none": 0.03102441174057222
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.2621359223300971,
+      "acc_stderr,none": 0.04354631077260595
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.26495726495726496,
+      "acc_stderr,none": 0.028911208802749472
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.26,
+      "acc_stderr,none": 0.0440844002276808
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.27330779054916987,
+      "acc_stderr,none": 0.015936681062628553
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.27450980392156865,
+      "acc_stderr,none": 0.025553169991826514
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.2553191489361702,
+      "acc_stderr,none": 0.026011992930902013
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.16911764705882354,
+      "acc_stderr,none": 0.02277086801011301
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.27710843373493976,
+      "acc_stderr,none": 0.03484331592680587
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.24894377640558987,
+      "acc_stderr,none": 0.007777495579368957
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.21929824561403508,
+      "acc_stderr,none": 0.0389243110651875
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.23737373737373738,
+      "acc_stderr,none": 0.030313710538198896
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.23834196891191708,
+      "acc_stderr,none": 0.03074890536390991
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.23846153846153847,
+      "acc_stderr,none": 0.02160629449464773
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.2184873949579832,
+      "acc_stderr,none": 0.026841514322958948
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.24220183486238533,
+      "acc_stderr,none": 0.01836817630659862
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.2824427480916031,
+      "acc_stderr,none": 0.03948406125768362
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.25163398692810457,
+      "acc_stderr,none": 0.01755581809132227
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.32727272727272727,
+      "acc_stderr,none": 0.04494290866252091
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.19183673469387755,
+      "acc_stderr,none": 0.025206963154225392
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.27860696517412936,
+      "acc_stderr,none": 0.031700561834973086
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.41,
+      "acc_stderr,none": 0.049431107042371025
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.24262607040913417,
+      "acc_stderr,none": 0.007627512597161906
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.22,
+      "acc_stderr,none": 0.04163331998932269
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.22962962962962963,
+      "acc_stderr,none": 0.036333844140734636
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.27631578947368424,
+      "acc_stderr,none": 0.03639057569952925
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.2638888888888889,
+      "acc_stderr,none": 0.03685651095897532
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.17,
+      "acc_stderr,none": 0.0377525168068637
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.04560480215720684
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.22,
+      "acc_stderr,none": 0.0416333199893227
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.18627450980392157,
+      "acc_stderr,none": 0.0387395871414935
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.047258156262526045
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.2936170212765957,
+      "acc_stderr,none": 0.029771642712491223
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.2620689655172414,
+      "acc_stderr,none": 0.036646663372252565
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.21428571428571427,
+      "acc_stderr,none": 0.021132859182754427
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.25483870967741934,
+      "acc_stderr,none": 0.024790118459332208
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.2315270935960591,
+      "acc_stderr,none": 0.029678333141444455
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.29,
+      "acc_stderr,none": 0.045604802157206845
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.2074074074074074,
+      "acc_stderr,none": 0.024720713193952165
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.2185430463576159,
+      "acc_stderr,none": 0.03374235550425694
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.21296296296296297,
+      "acc_stderr,none": 0.027920963147993666
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.30357142857142855,
+      "acc_stderr,none": 0.04364226155841044
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.795429815016322,
+      "acc_stderr,none": 0.009411688039193584,
+      "acc_norm,none": 0.8063112078346029,
+      "acc_norm_stderr,none": 0.009220384152336643,
+      "alias": "piqa"
+    }
+  },
+  "task_info": {
+    "model": "tiiuae/falcon-7b",
+    "revision": "main",
+    "private": false,
+    "params": 7.0,
+    "architectures": "FalconForCausalLM",
+    "quant_type": null,
+    "precision": "16bit",
+    "model_params": 7.0,
+    "model_size": 14.0,
+    "weight_dtype": "bfloat16",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-04-27T08:04:58Z",
+    "model_type": "original",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": null,
+  "versions": {
+    "harness|winogrande|0": 1.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|piqa|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1716069715.9687538,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=tiiuae/falcon-7b,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 4,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
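Note: the "config" block at the end of each report records how the lm-evaluation-harness run was launched (backend, model_args, batch size, device). A sketch of rebuilding an equivalent command line from that block, assuming the standard lm_eval CLI flags; the top-level task list is inferred from the "n-shot" table rather than stored verbatim:

import json

with open("tiiuae/results_2024-05-19-06-56-28_falcon-7b.json") as f:
    report = json.load(f)
cfg = report["config"]

# Keep only top-level tasks; the mmlu_* entries are subtasks of "mmlu".
tasks = sorted(t for t in report["n-shot"] if not t.startswith("mmlu_"))
cmd = (
    f"lm_eval --model {cfg['model']}"
    f" --model_args {cfg['model_args']}"
    f" --tasks {','.join(tasks)}"
    f" --batch_size {cfg['batch_size']}"
    f" --device {cfg['device']}"
    f" --num_fewshot 0"
)
print(cmd)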
upstage/results_2024-05-17-22-35-35_SOLAR-10.7B-Instruct-v1.0.json
ADDED
@@ -0,0 +1,576 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": "-1",
+    "start_time": "",
+    "end_time": "",
+    "total_evaluation_time_secondes": "",
+    "model_name": "upstage/SOLAR-10.7B-Instruct-v1.0",
+    "model_sha": "",
+    "model_dtype": "16bit",
+    "model_size": "21.46",
+    "model_params": "10.73",
+    "quant_type": null,
+    "precision": "16bit"
+  },
+  "results": {
+    "harness|arc:challenge|0": {
+      "acc,none": 0.6075085324232082,
+      "acc_stderr,none": 0.014269634635670724,
+      "acc_norm,none": 0.6390784982935154,
+      "acc_norm_stderr,none": 0.014034761386175452,
+      "alias": "arc_challenge"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.364,
+      "acc_stderr,none": 0.0215391706373177,
+      "acc_norm,none": 0.476,
+      "acc_norm_stderr,none": 0.0223572738810164,
+      "alias": "openbookqa"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.8068552774755169,
+      "acc_stderr,none": 0.009210530962579793,
+      "acc_norm,none": 0.8101196953210011,
+      "acc_norm_stderr,none": 0.009150819250948718,
+      "alias": "piqa"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 3.170644380815313,
+      "perplexity_stderr,none": 0.07317860298667478,
+      "acc,none": 0.727731418591112,
+      "acc_stderr,none": 0.006201495026535788,
+      "alias": "lambada_openai"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.7170850336920905,
+      "acc_stderr,none": 0.014993519779369784,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.6868153754232225,
+      "acc_stderr,none": 0.00462840908421878,
+      "acc_norm,none": 0.8634734116709819,
+      "acc_norm_stderr,none": 0.0034264517445079645,
+      "alias": "hellaswag"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.8853211009174312,
+      "acc_stderr,none": 0.0055729443323062075,
+      "alias": "boolq"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.6358068651189289,
+      "acc_stderr,none": 0.003803645761874694,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.5808714133900106,
+      "acc_stderr,none": 0.006641263715610051
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.40476190476190477,
+      "acc_stderr,none": 0.043902592653775614
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.8242424242424242,
+      "acc_stderr,none": 0.02972094300622445
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.8578431372549019,
+      "acc_stderr,none": 0.024509803921568624
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.8565400843881856,
+      "acc_stderr,none": 0.022818291821017012
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.7851239669421488,
+      "acc_stderr,none": 0.03749492448709699
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.7962962962962963,
+      "acc_stderr,none": 0.03893542518824847
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.7239263803680982,
+      "acc_stderr,none": 0.03512385283705049
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.7254335260115607,
+      "acc_stderr,none": 0.024027745155265
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.3027932960893855,
+      "acc_stderr,none": 0.015366860386397107
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.6977491961414791,
+      "acc_stderr,none": 0.026082700695399662
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.7561728395061729,
+      "acc_stderr,none": 0.023891879541959614
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.48891786179921776,
+      "acc_stderr,none": 0.012767098998525846
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.7894736842105263,
+      "acc_stderr,none": 0.031267817146631786
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.7186997103315095,
+      "acc_stderr,none": 0.007774584585330551
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.66,
+      "acc_stderr,none": 0.04760952285695237
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.720754716981132,
+      "acc_stderr,none": 0.027611163402399715
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.653179190751445,
+      "acc_stderr,none": 0.03629146670159663
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.35,
+      "acc_stderr,none": 0.047937248544110196
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.7040358744394619,
+      "acc_stderr,none": 0.03063659134869981
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.8058252427184466,
+      "acc_stderr,none": 0.03916667762822584
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.8803418803418803,
+      "acc_stderr,none": 0.021262719400406964
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.76,
+      "acc_stderr,none": 0.042923469599092816
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.8263090676883781,
+      "acc_stderr,none": 0.01354741565866226
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.7287581699346405,
+      "acc_stderr,none": 0.025457756696667867
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.5354609929078015,
+      "acc_stderr,none": 0.02975238965742705
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.7316176470588235,
+      "acc_stderr,none": 0.02691748122437722
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.5180722891566265,
+      "acc_stderr,none": 0.038899512528272166
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.7409814754631134,
+      "acc_stderr,none": 0.007748186487492694
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.047036043419179864
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.8181818181818182,
+      "acc_stderr,none": 0.027479603010538797
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.8860103626943006,
+      "acc_stderr,none": 0.02293514405391943
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.6820512820512821,
+      "acc_stderr,none": 0.023610884308927865
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.6848739495798319,
+      "acc_stderr,none": 0.030176808288974337
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.8275229357798165,
+      "acc_stderr,none": 0.016197807956848057
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.7786259541984732,
+      "acc_stderr,none": 0.036412970813137296
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.6748366013071896,
+      "acc_stderr,none": 0.01895088677080631
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.6636363636363637,
+      "acc_stderr,none": 0.04525393596302505
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.7061224489795919,
+      "acc_stderr,none": 0.029162738410249765
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.8059701492537313,
+      "acc_stderr,none": 0.027962677604768893
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.87,
+      "acc_stderr,none": 0.03379976689896309
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.5334601966381224,
+      "acc_stderr,none": 0.008536389351093637
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.37,
+      "acc_stderr,none": 0.04852365870939098
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.5777777777777777,
+      "acc_stderr,none": 0.04266763404099582
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.7236842105263158,
+      "acc_stderr,none": 0.03639057569952929
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.7638888888888888,
+      "acc_stderr,none": 0.03551446610810826
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.4,
+      "acc_stderr,none": 0.04923659639173309
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.050251890762960605
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.36,
+      "acc_stderr,none": 0.04824181513244218
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.43137254901960786,
+      "acc_stderr,none": 0.04928099597287534
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.73,
+      "acc_stderr,none": 0.0446196043338474
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.5914893617021276,
+      "acc_stderr,none": 0.032134180267015755
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.5241379310344828,
+      "acc_stderr,none": 0.0416180850350153
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.46825396825396826,
+      "acc_stderr,none": 0.025699352832131792
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.7838709677419354,
+      "acc_stderr,none": 0.023415293433568515
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.45320197044334976,
+      "acc_stderr,none": 0.03502544650845872
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.62,
+      "acc_stderr,none": 0.04878317312145632
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.34814814814814815,
+      "acc_stderr,none": 0.029045600290616255
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.3509933774834437,
+      "acc_stderr,none": 0.03896981964257374
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.5370370370370371,
+      "acc_stderr,none": 0.03400603625538271
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.4642857142857143,
+      "acc_stderr,none": 0.04733667890053756
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.7671665351223362,
+      "acc_stderr,none": 0.011878201073856553,
+      "alias": "winogrande"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.5777233782129743,
+      "acc_stderr,none": 0.017290733254248174,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.8320707070707071,
+      "acc_stderr,none": 0.0076702856738280565,
+      "acc_norm,none": 0.8148148148148148,
+      "acc_norm_stderr,none": 0.007970779064429208,
+      "alias": "arc_easy"
+    }
+  },
+  "task_info": {
+    "model": "upstage/SOLAR-10.7B-Instruct-v1.0",
+    "revision": "main",
+    "private": false,
+    "params": 10.73,
+    "architectures": "LlamaForCausalLM",
+    "quant_type": null,
+    "precision": "16bit",
+    "model_params": 10.73,
+    "model_size": 21.46,
+    "weight_dtype": "float16",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-04-27T08:04:58Z",
+    "model_type": "original",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": null,
+  "versions": {
+    "harness|arc:challenge|0": 1.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|piqa|0": 1.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|winogrande|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|arc:easy|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1715954031.0171022,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=upstage/SOLAR-10.7B-Instruct-v1.0,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 4,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
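Note: because every report uses the same schema, cross-model comparison is a single pass over the files. A sketch that ranks the models in this commit by aggregate MMLU accuracy (the glob pattern assumes the directory layout shown above; files without an MMLU entry are skipped):

import glob
import json

leaderboard = {}
for path in glob.glob("**/results_*.json", recursive=True):
    with open(path) as f:
        report = json.load(f)
    name = report["config_general"].get("model_name", path)
    # "harness|mmlu|0" holds the aggregate MMLU accuracy in each file.
    mmlu = report["results"].get("harness|mmlu|0", {}).get("acc,none")
    if mmlu is not None:
        leaderboard[name] = mmlu

for name, acc in sorted(leaderboard.items(), key=lambda kv: -kv[1]):
    print(f"{acc:.4f}  {name}")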