{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3148464163822526,
            "acc_stderr": 0.01357265770308495,
            "acc_norm": 0.37542662116040953,
            "acc_norm_stderr": 0.014150631435111726
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3571997610037841,
            "acc_stderr": 0.004781950883460504,
            "acc_norm": 0.4569806811392153,
            "acc_norm_stderr": 0.004971278309204196
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.03820042586602967,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.03820042586602967
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6310679611650486,
            "acc_stderr": 0.0477761518115674,
            "acc_norm": 0.6310679611650486,
            "acc_norm_stderr": 0.0477761518115674
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4342273307790549,
            "acc_stderr": 0.017724589389677785,
            "acc_norm": 0.4342273307790549,
            "acc_norm_stderr": 0.017724589389677785
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34814814814814815,
            "acc_stderr": 0.041153246103369526,
            "acc_norm": 0.34814814814814815,
            "acc_norm_stderr": 0.041153246103369526
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.030579442773610334,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.030579442773610334
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.43373493975903615,
            "acc_stderr": 0.03858158940685515,
            "acc_norm": 0.43373493975903615,
            "acc_norm_stderr": 0.03858158940685515
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4694533762057878,
            "acc_stderr": 0.02834504586484068,
            "acc_norm": 0.4694533762057878,
            "acc_norm_stderr": 0.02834504586484068
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.39461883408071746,
            "acc_stderr": 0.03280400504755292,
            "acc_norm": 0.39461883408071746,
            "acc_norm_stderr": 0.03280400504755292
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4122137404580153,
            "acc_stderr": 0.04317171194870254,
            "acc_norm": 0.4122137404580153,
            "acc_norm_stderr": 0.04317171194870254
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.47474747474747475,
            "acc_stderr": 0.035578062450873145,
            "acc_norm": 0.47474747474747475,
            "acc_norm_stderr": 0.035578062450873145
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.47586206896551725,
            "acc_stderr": 0.041618085035015295,
            "acc_norm": 0.47586206896551725,
            "acc_norm_stderr": 0.041618085035015295
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.046550104113196177,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.046550104113196177
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.025106820660539746,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.025106820660539746
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.03471192860518468,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.03471192860518468
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45161290322580644,
            "acc_stderr": 0.02831050034856839,
            "acc_norm": 0.45161290322580644,
            "acc_norm_stderr": 0.02831050034856839
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6752136752136753,
            "acc_stderr": 0.03067902276549883,
            "acc_norm": 0.6752136752136753,
            "acc_norm_stderr": 0.03067902276549883
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.41132075471698115,
            "acc_stderr": 0.030285009259009805,
            "acc_norm": 0.41132075471698115,
            "acc_norm_stderr": 0.030285009259009805
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.04782001791380063,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.04782001791380063
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.028406533090608463,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.028406533090608463
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5771144278606966,
            "acc_stderr": 0.034932317774212816,
            "acc_norm": 0.5771144278606966,
            "acc_norm_stderr": 0.034932317774212816
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.037038511930995215,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.037038511930995215
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36243386243386244,
            "acc_stderr": 0.02475747390275205,
            "acc_norm": 0.36243386243386244,
            "acc_norm_stderr": 0.02475747390275205
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2986111111111111,
            "acc_stderr": 0.03827052357950756,
            "acc_norm": 0.2986111111111111,
            "acc_norm_stderr": 0.03827052357950756
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.026897049996382875,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.026897049996382875
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4785276073619632,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.4785276073619632,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4382716049382716,
            "acc_stderr": 0.027607914087400473,
            "acc_norm": 0.4382716049382716,
            "acc_norm_stderr": 0.027607914087400473
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720685,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720685
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49740932642487046,
            "acc_stderr": 0.03608390745384488,
            "acc_norm": 0.49740932642487046,
            "acc_norm_stderr": 0.03608390745384488
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.47889908256880737,
            "acc_stderr": 0.021418224754264643,
            "acc_norm": 0.47889908256880737,
            "acc_norm_stderr": 0.021418224754264643
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3492063492063492,
            "acc_stderr": 0.04263906892795132,
            "acc_norm": 0.3492063492063492,
            "acc_norm_stderr": 0.04263906892795132
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.45751633986928103,
            "acc_stderr": 0.028526383452142624,
            "acc_norm": 0.45751633986928103,
            "acc_norm_stderr": 0.028526383452142624
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4473684210526316,
            "acc_stderr": 0.040463368839782514,
            "acc_norm": 0.4473684210526316,
            "acc_norm_stderr": 0.040463368839782514
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.01943177567703731,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.01943177567703731
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32978723404255317,
            "acc_stderr": 0.028045946942042405,
            "acc_norm": 0.32978723404255317,
            "acc_norm_stderr": 0.028045946942042405
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4212962962962963,
            "acc_stderr": 0.03367462138896078,
            "acc_norm": 0.4212962962962963,
            "acc_norm_stderr": 0.03367462138896078
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.33631284916201115,
            "acc_stderr": 0.015801003729145908,
            "acc_norm": 0.33631284916201115,
            "acc_norm_stderr": 0.015801003729145908
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.39338235294117646,
            "acc_stderr": 0.02967428828131118,
            "acc_norm": 0.39338235294117646,
            "acc_norm_stderr": 0.02967428828131118
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5061224489795918,
            "acc_stderr": 0.03200682020163906,
            "acc_norm": 0.5061224489795918,
            "acc_norm_stderr": 0.03200682020163906
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5358649789029536,
            "acc_stderr": 0.03246338898055659,
            "acc_norm": 0.5358649789029536,
            "acc_norm_stderr": 0.03246338898055659
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31877444589308995,
            "acc_stderr": 0.011901895635786084,
            "acc_norm": 0.31877444589308995,
            "acc_norm_stderr": 0.011901895635786084
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45588235294117646,
            "acc_stderr": 0.03495624522015474,
            "acc_norm": 0.45588235294117646,
            "acc_norm_stderr": 0.03495624522015474
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.03888176921674099,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.03888176921674099
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.30354957160342716,
            "mc1_stderr": 0.016095884155386854,
            "mc2": 0.4745826617149022,
            "mc2_stderr": 0.015464604846827046
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.38016528925619836,
            "acc_stderr": 0.0166893335969801,
            "acc_norm": 0.4155844155844156,
            "acc_norm_stderr": 0.01694358631307657
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05",
        "model_sha": "fb04a8d5574256eefe4faa1783874384c88eea9b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}