{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3174061433447099,
            "acc_stderr": 0.01360223908803817,
            "acc_norm": 0.35238907849829354,
            "acc_norm_stderr": 0.01396014260059868
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36536546504680345,
            "acc_stderr": 0.004805483767055344,
            "acc_norm": 0.45648277235610435,
            "acc_norm_stderr": 0.004970846697552307
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.03811079669833531,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.03811079669833531
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4854368932038835,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.4854368932038835,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4355044699872286,
            "acc_stderr": 0.01773058992792662,
            "acc_norm": 0.4355044699872286,
            "acc_norm_stderr": 0.01773058992792662
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.0402477840197711,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.0402477840197711
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3829787234042553,
            "acc_stderr": 0.03177821250236922,
            "acc_norm": 0.3829787234042553,
            "acc_norm_stderr": 0.03177821250236922
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39759036144578314,
            "acc_stderr": 0.038099730845402184,
            "acc_norm": 0.39759036144578314,
            "acc_norm_stderr": 0.038099730845402184
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.47266881028938906,
            "acc_stderr": 0.02835563356832818,
            "acc_norm": 0.47266881028938906,
            "acc_norm_stderr": 0.02835563356832818
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.36771300448430494,
            "acc_stderr": 0.03236198350928276,
            "acc_norm": 0.36771300448430494,
            "acc_norm_stderr": 0.03236198350928276
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3969465648854962,
            "acc_stderr": 0.04291135671009224,
            "acc_norm": 0.3969465648854962,
            "acc_norm_stderr": 0.04291135671009224
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.48484848484848486,
            "acc_stderr": 0.03560716516531061,
            "acc_norm": 0.48484848484848486,
            "acc_norm_stderr": 0.03560716516531061
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482758,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482758
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4128205128205128,
            "acc_stderr": 0.024962683564331817,
            "acc_norm": 0.4128205128205128,
            "acc_norm_stderr": 0.024962683564331817
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760628,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760628
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4088669950738916,
            "acc_stderr": 0.03459058815883231,
            "acc_norm": 0.4088669950738916,
            "acc_norm_stderr": 0.03459058815883231
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.38387096774193546,
            "acc_stderr": 0.02766618207553963,
            "acc_norm": 0.38387096774193546,
            "acc_norm_stderr": 0.02766618207553963
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6324786324786325,
            "acc_stderr": 0.031585391577456365,
            "acc_norm": 0.6324786324786325,
            "acc_norm_stderr": 0.031585391577456365
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.41509433962264153,
            "acc_stderr": 0.030325945789286102,
            "acc_norm": 0.41509433962264153,
            "acc_norm_stderr": 0.030325945789286102
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.44545454545454544,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.44545454545454544,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3074074074074074,
            "acc_stderr": 0.028133252578815646,
            "acc_norm": 0.3074074074074074,
            "acc_norm_stderr": 0.028133252578815646
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.036313298039696525,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.036313298039696525
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5920398009950248,
            "acc_stderr": 0.03475116365194092,
            "acc_norm": 0.5920398009950248,
            "acc_norm_stderr": 0.03475116365194092
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.03703851193099521,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.03703851193099521
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3835978835978836,
            "acc_stderr": 0.025043757318520196,
            "acc_norm": 0.3835978835978836,
            "acc_norm_stderr": 0.025043757318520196
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.04016660030451233,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.04016660030451233
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.47109826589595377,
            "acc_stderr": 0.026874085883518348,
            "acc_norm": 0.47109826589595377,
            "acc_norm_stderr": 0.026874085883518348
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4233128834355828,
            "acc_stderr": 0.038818912133343826,
            "acc_norm": 0.4233128834355828,
            "acc_norm_stderr": 0.038818912133343826
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4228395061728395,
            "acc_stderr": 0.027487472980871595,
            "acc_norm": 0.4228395061728395,
            "acc_norm_stderr": 0.027487472980871595
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.42487046632124353,
            "acc_stderr": 0.035674713352125395,
            "acc_norm": 0.42487046632124353,
            "acc_norm_stderr": 0.035674713352125395
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.44036697247706424,
            "acc_stderr": 0.021284310623761547,
            "acc_norm": 0.44036697247706424,
            "acc_norm_stderr": 0.021284310623761547
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.043062412591271526,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.043062412591271526
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4934640522875817,
            "acc_stderr": 0.028627470550556054,
            "acc_norm": 0.4934640522875817,
            "acc_norm_stderr": 0.028627470550556054
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5867768595041323,
            "acc_stderr": 0.04495087843548408,
            "acc_norm": 0.5867768595041323,
            "acc_norm_stderr": 0.04495087843548408
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.03977749934622074,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.03977749934622074
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3415032679738562,
            "acc_stderr": 0.01918463932809249,
            "acc_norm": 0.3415032679738562,
            "acc_norm_stderr": 0.01918463932809249
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.02764012054516993,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.02764012054516993
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285713,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285713
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.36574074074074076,
            "acc_stderr": 0.03284738857647207,
            "acc_norm": 0.36574074074074076,
            "acc_norm_stderr": 0.03284738857647207
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225608,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.36764705882352944,
            "acc_stderr": 0.029289413409403196,
            "acc_norm": 0.36764705882352944,
            "acc_norm_stderr": 0.029289413409403196
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4857142857142857,
            "acc_stderr": 0.03199615232806287,
            "acc_norm": 0.4857142857142857,
            "acc_norm_stderr": 0.03199615232806287
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5358649789029536,
            "acc_stderr": 0.03246338898055659,
            "acc_norm": 0.5358649789029536,
            "acc_norm_stderr": 0.03246338898055659
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.318122555410691,
            "acc_stderr": 0.011895407281104097,
            "acc_norm": 0.318122555410691,
            "acc_norm_stderr": 0.011895407281104097
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3872549019607843,
            "acc_stderr": 0.03418931233833344,
            "acc_norm": 0.3872549019607843,
            "acc_norm_stderr": 0.03418931233833344
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737302,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737302
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29865361077111385,
            "mc1_stderr": 0.016021570613768542,
            "mc2": 0.4631702412075074,
            "mc2_stderr": 0.01580874554216882
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3789846517119244,
            "acc_stderr": 0.01667926068422929,
            "acc_norm": 0.43683589138134593,
            "acc_norm_stderr": 0.017052633559856076
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCJihunKim/MIstral-7B-SlimOrca-OP-2k",
        "model_sha": "339ce8fcda3879a2a6e0dbe0ffb06d1f0be9fd15",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}