|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.41638225255972694,
            "acc_stderr": 0.01440561827943618,
            "acc_norm": 0.4803754266211604,
            "acc_norm_stderr": 0.014600132075947096
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.43158733320055764,
            "acc_stderr": 0.004942853459371549,
            "acc_norm": 0.5758812985461064,
            "acc_norm_stderr": 0.004931984642695335
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5029239766081871,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.5029239766081871,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.565772669220945,
            "acc_stderr": 0.01772458938967779,
            "acc_norm": 0.565772669220945,
            "acc_norm_stderr": 0.01772458938967779
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.04309732901036354,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.04309732901036354
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542124,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542124
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4340425531914894,
            "acc_stderr": 0.03240038086792747,
            "acc_norm": 0.4340425531914894,
            "acc_norm_stderr": 0.03240038086792747
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.038284011150790206,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.038284011150790206
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.028396770444111298,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.028396770444111298
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5291479820627802,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.5291479820627802,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.46564885496183206,
            "acc_stderr": 0.043749285605997376,
            "acc_norm": 0.46564885496183206,
            "acc_norm_stderr": 0.043749285605997376
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.601010101010101,
            "acc_stderr": 0.0348890161685273,
            "acc_norm": 0.601010101010101,
            "acc_norm_stderr": 0.0348890161685273
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.042801058373643966,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.042801058373643966
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4789915966386555,
            "acc_stderr": 0.03244980849990029,
            "acc_norm": 0.4789915966386555,
            "acc_norm_stderr": 0.03244980849990029
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.025294608023986483,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.025294608023986483
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4088669950738916,
            "acc_stderr": 0.03459058815883231,
            "acc_norm": 0.4088669950738916,
            "acc_norm_stderr": 0.03459058815883231
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4806451612903226,
            "acc_stderr": 0.0284226874043121,
            "acc_norm": 0.4806451612903226,
            "acc_norm_stderr": 0.0284226874043121
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6538461538461539,
            "acc_stderr": 0.031166957367235903,
            "acc_norm": 0.6538461538461539,
            "acc_norm_stderr": 0.031166957367235903
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4490566037735849,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.4490566037735849,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844058,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844058
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389024,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6218905472636815,
            "acc_stderr": 0.03428867848778658,
            "acc_norm": 0.6218905472636815,
            "acc_norm_stderr": 0.03428867848778658
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.41040462427745666,
            "acc_stderr": 0.03750757044895538,
            "acc_norm": 0.41040462427745666,
            "acc_norm_stderr": 0.03750757044895538
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.023456037383982026,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.023456037383982026
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4097222222222222,
            "acc_stderr": 0.04112490974670787,
            "acc_norm": 0.4097222222222222,
            "acc_norm_stderr": 0.04112490974670787
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.64,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5115606936416185,
            "acc_stderr": 0.026911898686377913,
            "acc_norm": 0.5115606936416185,
            "acc_norm_stderr": 0.026911898686377913
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4845679012345679,
            "acc_stderr": 0.027807490044276198,
            "acc_norm": 0.4845679012345679,
            "acc_norm_stderr": 0.027807490044276198
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6062176165803109,
            "acc_stderr": 0.035260770955482405,
            "acc_norm": 0.6062176165803109,
            "acc_norm_stderr": 0.035260770955482405
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537317,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537317
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6165137614678899,
            "acc_stderr": 0.020847156641915984,
            "acc_norm": 0.6165137614678899,
            "acc_norm_stderr": 0.020847156641915984
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.04240799327574924,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.04240799327574924
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.434640522875817,
            "acc_stderr": 0.02838425670488304,
            "acc_norm": 0.434640522875817,
            "acc_norm_stderr": 0.02838425670488304
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3815789473684211,
            "acc_stderr": 0.03953173377749194,
            "acc_norm": 0.3815789473684211,
            "acc_norm_stderr": 0.03953173377749194
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.37745098039215685,
            "acc_stderr": 0.019610851474880286,
            "acc_norm": 0.37745098039215685,
            "acc_norm_stderr": 0.019610851474880286
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35815602836879434,
            "acc_stderr": 0.02860208586275942,
            "acc_norm": 0.35815602836879434,
            "acc_norm_stderr": 0.02860208586275942
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.23214285714285715,
            "acc_stderr": 0.04007341809755805,
            "acc_norm": 0.23214285714285715,
            "acc_norm_stderr": 0.04007341809755805
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.36574074074074076,
            "acc_stderr": 0.03284738857647206,
            "acc_norm": 0.36574074074074076,
            "acc_norm_stderr": 0.03284738857647206
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5036764705882353,
            "acc_stderr": 0.0303720158854282,
            "acc_norm": 0.5036764705882353,
            "acc_norm_stderr": 0.0303720158854282
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.47346938775510206,
            "acc_stderr": 0.03196412734523272,
            "acc_norm": 0.47346938775510206,
            "acc_norm_stderr": 0.03196412734523272
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6582278481012658,
            "acc_stderr": 0.030874537537553617,
            "acc_norm": 0.6582278481012658,
            "acc_norm_stderr": 0.030874537537553617
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35658409387222945,
            "acc_stderr": 0.012233642989273888,
            "acc_norm": 0.35658409387222945,
            "acc_norm_stderr": 0.012233642989273888
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5393939393939394,
            "acc_stderr": 0.03892207016552012,
            "acc_norm": 0.5393939393939394,
            "acc_norm_stderr": 0.03892207016552012
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3011015911872705,
            "mc1_stderr": 0.01605899902610062,
            "mc2": 0.46120733649464474,
            "mc2_stderr": 0.015282924396450131
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4510035419126328,
            "acc_stderr": 0.01710761885954935,
            "acc_norm": 0.5242030696576151,
            "acc_norm_stderr": 0.017170202466520748
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DopeorNope/COKAL_pre_DPO_Test_v2-13b",
        "model_sha": "e2fb97d3ef746540a5900ad1e19250dd74dc429f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}