{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.39419795221843,
      "acc_stderr": 0.01428052266746733,
      "acc_norm": 0.454778156996587,
      "acc_norm_stderr": 0.014551507060836355
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.4217287392949612,
      "acc_stderr": 0.004928263494616727,
      "acc_norm": 0.5544712208723361,
      "acc_norm_stderr": 0.004960082528852438
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.543859649122807,
      "acc_stderr": 0.03820042586602967,
      "acc_norm": 0.543859649122807,
      "acc_norm_stderr": 0.03820042586602967
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.5436893203883495,
      "acc_stderr": 0.049318019942204146,
      "acc_norm": 0.5436893203883495,
      "acc_norm_stderr": 0.049318019942204146
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.49936143039591313,
      "acc_stderr": 0.017879948914431662,
      "acc_norm": 0.49936143039591313,
      "acc_norm_stderr": 0.017879948914431662
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.4074074074074074,
      "acc_stderr": 0.042446332383532286,
      "acc_norm": 0.4074074074074074,
      "acc_norm_stderr": 0.042446332383532286
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.3659574468085106,
      "acc_stderr": 0.0314895582974553,
      "acc_norm": 0.3659574468085106,
      "acc_norm_stderr": 0.0314895582974553
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.42771084337349397,
      "acc_stderr": 0.03851597683718533,
      "acc_norm": 0.42771084337349397,
      "acc_norm_stderr": 0.03851597683718533
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.4630225080385852,
      "acc_stderr": 0.028320325830105915,
      "acc_norm": 0.4630225080385852,
      "acc_norm_stderr": 0.028320325830105915
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.35874439461883406,
      "acc_stderr": 0.03219079200419995,
      "acc_norm": 0.35874439461883406,
      "acc_norm_stderr": 0.03219079200419995
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.5038167938931297,
      "acc_stderr": 0.043851623256015534,
      "acc_norm": 0.5038167938931297,
      "acc_norm_stderr": 0.043851623256015534
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.5909090909090909,
      "acc_stderr": 0.03502975799413007,
      "acc_norm": 0.5909090909090909,
      "acc_norm_stderr": 0.03502975799413007
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.4068965517241379,
      "acc_stderr": 0.040937939812662374,
      "acc_norm": 0.4068965517241379,
      "acc_norm_stderr": 0.040937939812662374
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.24509803921568626,
      "acc_stderr": 0.042801058373643966,
      "acc_norm": 0.24509803921568626,
      "acc_norm_stderr": 0.042801058373643966
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.42436974789915966,
      "acc_stderr": 0.03210479051015776,
      "acc_norm": 0.42436974789915966,
      "acc_norm_stderr": 0.03210479051015776
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.39487179487179486,
      "acc_stderr": 0.02478431694215636,
      "acc_norm": 0.39487179487179486,
      "acc_norm_stderr": 0.02478431694215636
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.48,
      "acc_stderr": 0.050211673156867795,
      "acc_norm": 0.48,
      "acc_norm_stderr": 0.050211673156867795
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.35,
      "acc_stderr": 0.047937248544110196,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.047937248544110196
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.4444444444444444,
      "acc_stderr": 0.04803752235190193,
      "acc_norm": 0.4444444444444444,
      "acc_norm_stderr": 0.04803752235190193
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.3399014778325123,
      "acc_stderr": 0.033327690684107895,
      "acc_norm": 0.3399014778325123,
      "acc_norm_stderr": 0.033327690684107895
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.432258064516129,
      "acc_stderr": 0.028181739720019416,
      "acc_norm": 0.432258064516129,
      "acc_norm_stderr": 0.028181739720019416
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.6581196581196581,
      "acc_stderr": 0.031075028526507748,
      "acc_norm": 0.6581196581196581,
      "acc_norm_stderr": 0.031075028526507748
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.4716981132075472,
      "acc_stderr": 0.030723535249006107,
      "acc_norm": 0.4716981132075472,
      "acc_norm_stderr": 0.030723535249006107
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.4818181818181818,
      "acc_stderr": 0.04785964010794916,
      "acc_norm": 0.4818181818181818,
      "acc_norm_stderr": 0.04785964010794916
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.2518518518518518,
      "acc_stderr": 0.02646611753895992,
      "acc_norm": 0.2518518518518518,
      "acc_norm_stderr": 0.02646611753895992
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.2913907284768212,
      "acc_stderr": 0.03710185726119995,
      "acc_norm": 0.2913907284768212,
      "acc_norm_stderr": 0.03710185726119995
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.5970149253731343,
      "acc_stderr": 0.034683432951111266,
      "acc_norm": 0.5970149253731343,
      "acc_norm_stderr": 0.034683432951111266
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.3583815028901734,
      "acc_stderr": 0.0365634365335316,
      "acc_norm": 0.3583815028901734,
      "acc_norm_stderr": 0.0365634365335316
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.30952380952380953,
      "acc_stderr": 0.023809523809523864,
      "acc_norm": 0.30952380952380953,
      "acc_norm_stderr": 0.023809523809523864
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.3402777777777778,
      "acc_stderr": 0.03962135573486219,
      "acc_norm": 0.3402777777777778,
      "acc_norm_stderr": 0.03962135573486219
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.64,
      "acc_stderr": 0.048241815132442176,
      "acc_norm": 0.64,
      "acc_norm_stderr": 0.048241815132442176
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.4479768786127168,
      "acc_stderr": 0.026772990653361826,
      "acc_norm": 0.4479768786127168,
      "acc_norm_stderr": 0.026772990653361826
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.4171779141104294,
      "acc_stderr": 0.038741028598180814,
      "acc_norm": 0.4171779141104294,
      "acc_norm_stderr": 0.038741028598180814
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.4537037037037037,
      "acc_stderr": 0.0277012284685426,
      "acc_norm": 0.4537037037037037,
      "acc_norm_stderr": 0.0277012284685426
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252606,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252606
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.46632124352331605,
      "acc_stderr": 0.03600244069867178,
      "acc_norm": 0.46632124352331605,
      "acc_norm_stderr": 0.03600244069867178
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.2631578947368421,
      "acc_stderr": 0.04142439719489361,
      "acc_norm": 0.2631578947368421,
      "acc_norm_stderr": 0.04142439719489361
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.4990825688073395,
      "acc_stderr": 0.021437287056051215,
      "acc_norm": 0.4990825688073395,
      "acc_norm_stderr": 0.021437287056051215
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.30952380952380953,
      "acc_stderr": 0.04134913018303316,
      "acc_norm": 0.30952380952380953,
      "acc_norm_stderr": 0.04134913018303316
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.4477124183006536,
      "acc_stderr": 0.028472938478033522,
      "acc_norm": 0.4477124183006536,
      "acc_norm_stderr": 0.028472938478033522
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.38,
      "acc_stderr": 0.04878317312145634,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.04878317312145634
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.5867768595041323,
      "acc_stderr": 0.04495087843548408,
      "acc_norm": 0.5867768595041323,
      "acc_norm_stderr": 0.04495087843548408
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.4407894736842105,
      "acc_stderr": 0.04040311062490436,
      "acc_norm": 0.4407894736842105,
      "acc_norm_stderr": 0.04040311062490436
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.33169934640522875,
      "acc_stderr": 0.01904748523936038,
      "acc_norm": 0.33169934640522875,
      "acc_norm_stderr": 0.01904748523936038
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.3333333333333333,
      "acc_stderr": 0.02812163604063988,
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.02812163604063988
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.20535714285714285,
      "acc_stderr": 0.03834241021419073,
      "acc_norm": 0.20535714285714285,
      "acc_norm_stderr": 0.03834241021419073
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.2824074074074074,
      "acc_stderr": 0.03070137211151092,
      "acc_norm": 0.2824074074074074,
      "acc_norm_stderr": 0.03070137211151092
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.2424581005586592,
      "acc_stderr": 0.01433352205921789,
      "acc_norm": 0.2424581005586592,
      "acc_norm_stderr": 0.01433352205921789
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.32,
      "acc_stderr": 0.046882617226215034,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.046882617226215034
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.46,
      "acc_stderr": 0.05009082659620333,
      "acc_norm": 0.46,
      "acc_norm_stderr": 0.05009082659620333
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.27205882352941174,
      "acc_stderr": 0.027033041151681456,
      "acc_norm": 0.27205882352941174,
      "acc_norm_stderr": 0.027033041151681456
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.4775510204081633,
      "acc_stderr": 0.031976941187136725,
      "acc_norm": 0.4775510204081633,
      "acc_norm_stderr": 0.031976941187136725
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.5189873417721519,
      "acc_stderr": 0.03252375148090447,
      "acc_norm": 0.5189873417721519,
      "acc_norm_stderr": 0.03252375148090447
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.2966101694915254,
      "acc_stderr": 0.011665946586082854,
      "acc_norm": 0.2966101694915254,
      "acc_norm_stderr": 0.011665946586082854
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.4215686274509804,
      "acc_stderr": 0.03465868196380757,
      "acc_norm": 0.4215686274509804,
      "acc_norm_stderr": 0.03465868196380757
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.509090909090909,
      "acc_stderr": 0.0390369864774844,
      "acc_norm": 0.509090909090909,
      "acc_norm_stderr": 0.0390369864774844
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.30599755201958384,
      "mc1_stderr": 0.016132229728155062,
      "mc2": 0.4746429594651757,
      "mc2_stderr": 0.01531218992321956
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.5206611570247934,
      "acc_stderr": 0.017175671279836446,
      "acc_norm": 0.5678866587957497,
      "acc_norm_stderr": 0.017031170198851742
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "jiwoochris/ko-llama2-v1",
    "model_sha": "4253098940413125f8f0847038c076d42e5b2c59",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}