{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3412969283276451,
            "acc_stderr": 0.01385583128749772,
            "acc_norm": 0.40017064846416384,
            "acc_norm_stderr": 0.014317197787809186
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36606253734315874,
            "acc_stderr": 0.00480742334322458,
            "acc_norm": 0.47719577773351923,
            "acc_norm_stderr": 0.004984589012289372
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5436893203883495,
            "acc_stderr": 0.04931801994220416,
            "acc_norm": 0.5436893203883495,
            "acc_norm_stderr": 0.04931801994220416
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.49680715197956576,
            "acc_stderr": 0.01787959894593307,
            "acc_norm": 0.49680715197956576,
            "acc_norm_stderr": 0.01787959894593307
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.031410821975962386,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.031410821975962386
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.35542168674698793,
            "acc_stderr": 0.03726214354322415,
            "acc_norm": 0.35542168674698793,
            "acc_norm_stderr": 0.03726214354322415
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5080385852090032,
            "acc_stderr": 0.028394421370984538,
            "acc_norm": 0.5080385852090032,
            "acc_norm_stderr": 0.028394421370984538
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3991031390134529,
            "acc_stderr": 0.03286745312567961,
            "acc_norm": 0.3991031390134529,
            "acc_norm_stderr": 0.03286745312567961
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553893,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553893
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5606060606060606,
            "acc_stderr": 0.035360859475294805,
            "acc_norm": 0.5606060606060606,
            "acc_norm_stderr": 0.035360859475294805
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.04533838195929774,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.04533838195929774
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4495798319327731,
            "acc_stderr": 0.03231293497137707,
            "acc_norm": 0.4495798319327731,
            "acc_norm_stderr": 0.03231293497137707
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.45897435897435895,
            "acc_stderr": 0.025265525491284295,
            "acc_norm": 0.45897435897435895,
            "acc_norm_stderr": 0.025265525491284295
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.48,
            "acc_stderr": 0.05021167315686781,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.05021167315686781
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4039408866995074,
            "acc_stderr": 0.0345245390382204,
            "acc_norm": 0.4039408866995074,
            "acc_norm_stderr": 0.0345245390382204
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45161290322580644,
            "acc_stderr": 0.028310500348568392,
            "acc_norm": 0.45161290322580644,
            "acc_norm_stderr": 0.028310500348568392
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6324786324786325,
            "acc_stderr": 0.03158539157745636,
            "acc_norm": 0.6324786324786325,
            "acc_norm_stderr": 0.03158539157745636
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4716981132075472,
            "acc_stderr": 0.030723535249006107,
            "acc_norm": 0.4716981132075472,
            "acc_norm_stderr": 0.030723535249006107
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.37272727272727274,
            "acc_stderr": 0.04631381319425464,
            "acc_norm": 0.37272727272727274,
            "acc_norm_stderr": 0.04631381319425464
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.02803792996911499,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.02803792996911499
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.582089552238806,
            "acc_stderr": 0.034875586404620636,
            "acc_norm": 0.582089552238806,
            "acc_norm_stderr": 0.034875586404620636
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4508670520231214,
            "acc_stderr": 0.03794012674697031,
            "acc_norm": 0.4508670520231214,
            "acc_norm_stderr": 0.03794012674697031
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3439153439153439,
            "acc_stderr": 0.02446442662559643,
            "acc_norm": 0.3439153439153439,
            "acc_norm_stderr": 0.02446442662559643
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.039812405437178615,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.039812405437178615
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4049079754601227,
            "acc_stderr": 0.038566721635489125,
            "acc_norm": 0.4049079754601227,
            "acc_norm_stderr": 0.038566721635489125
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4537037037037037,
            "acc_stderr": 0.027701228468542602,
            "acc_norm": 0.4537037037037037,
            "acc_norm_stderr": 0.027701228468542602
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.47150259067357514,
            "acc_stderr": 0.036025735712884414,
            "acc_norm": 0.47150259067357514,
            "acc_norm_stderr": 0.036025735712884414
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4990825688073395,
            "acc_stderr": 0.021437287056051215,
            "acc_norm": 0.4990825688073395,
            "acc_norm_stderr": 0.021437287056051215
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4673202614379085,
            "acc_stderr": 0.02856869975222588,
            "acc_norm": 0.4673202614379085,
            "acc_norm_stderr": 0.02856869975222588
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237103,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237103
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6033057851239669,
            "acc_stderr": 0.044658697805310094,
            "acc_norm": 0.6033057851239669,
            "acc_norm_stderr": 0.044658697805310094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40789473684210525,
            "acc_stderr": 0.03999309712777472,
            "acc_norm": 0.40789473684210525,
            "acc_norm_stderr": 0.03999309712777472
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3839869281045752,
            "acc_stderr": 0.019675808135281525,
            "acc_norm": 0.3839869281045752,
            "acc_norm_stderr": 0.019675808135281525
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2978723404255319,
            "acc_stderr": 0.027281608344469414,
            "acc_norm": 0.2978723404255319,
            "acc_norm_stderr": 0.027281608344469414
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23016759776536314,
            "acc_stderr": 0.014078339253425807,
            "acc_norm": 0.23016759776536314,
            "acc_norm_stderr": 0.014078339253425807
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.029520095697687765,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.029520095697687765
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4775510204081633,
            "acc_stderr": 0.03197694118713673,
            "acc_norm": 0.4775510204081633,
            "acc_norm_stderr": 0.03197694118713673
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5949367088607594,
            "acc_stderr": 0.031955147413706725,
            "acc_norm": 0.5949367088607594,
            "acc_norm_stderr": 0.031955147413706725
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31486310299869624,
            "acc_stderr": 0.011862561755715937,
            "acc_norm": 0.31486310299869624,
            "acc_norm_stderr": 0.011862561755715937
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.03508637358630572,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.03508637358630572
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.49696969696969695,
            "acc_stderr": 0.03904272341431856,
            "acc_norm": 0.49696969696969695,
            "acc_norm_stderr": 0.03904272341431856
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2778457772337821,
            "mc1_stderr": 0.015680929364024633,
            "mc2": 0.45111653933523216,
            "mc2_stderr": 0.015355758550705367
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.31103286384976525,
            "acc_stderr": 0.015868563452870778,
            "acc_norm": 0.42488262910798125,
            "acc_norm_stderr": 0.016945248826821704
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "maywell/Synatra-11B-Instruct-v0.3-pre",
        "model_sha": "a40fe5c95687a32967ea4573e958356a214ae652",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}