{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3046075085324232,
            "acc_stderr": 0.01344952210993249,
            "acc_norm": 0.34726962457337884,
            "acc_norm_stderr": 0.013913034529620442
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3494323839872535,
            "acc_stderr": 0.004758162967997396,
            "acc_norm": 0.4313881696873133,
            "acc_norm_stderr": 0.004942578520987348
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.38011695906432746,
            "acc_stderr": 0.03722965741385539,
            "acc_norm": 0.38011695906432746,
            "acc_norm_stderr": 0.03722965741385539
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.39805825242718446,
            "acc_stderr": 0.04846748253977239,
            "acc_norm": 0.39805825242718446,
            "acc_norm_stderr": 0.04846748253977239
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3665389527458493,
            "acc_stderr": 0.01723124462679705,
            "acc_norm": 0.3665389527458493,
            "acc_norm_stderr": 0.01723124462679705
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.03944624162501117,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.03944624162501117
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.030579442773610334,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.030579442773610334
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3253012048192771,
            "acc_stderr": 0.03647168523683227,
            "acc_norm": 0.3253012048192771,
            "acc_norm_stderr": 0.03647168523683227
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.37942122186495175,
            "acc_stderr": 0.027559949802347817,
            "acc_norm": 0.37942122186495175,
            "acc_norm_stderr": 0.027559949802347817
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.03259625118416827,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.03259625118416827
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3816793893129771,
            "acc_stderr": 0.0426073515764456,
            "acc_norm": 0.3816793893129771,
            "acc_norm_stderr": 0.0426073515764456
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3282828282828283,
            "acc_stderr": 0.03345678422756777,
            "acc_norm": 0.3282828282828283,
            "acc_norm_stderr": 0.03345678422756777
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.32413793103448274,
            "acc_stderr": 0.03900432069185553,
            "acc_norm": 0.32413793103448274,
            "acc_norm_stderr": 0.03900432069185553
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3865546218487395,
            "acc_stderr": 0.03163145807552378,
            "acc_norm": 0.3865546218487395,
            "acc_norm_stderr": 0.03163145807552378
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.34102564102564104,
            "acc_stderr": 0.024035489676335044,
            "acc_norm": 0.34102564102564104,
            "acc_norm_stderr": 0.024035489676335044
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3870967741935484,
            "acc_stderr": 0.02770935967503249,
            "acc_norm": 0.3870967741935484,
            "acc_norm_stderr": 0.02770935967503249
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5811965811965812,
            "acc_stderr": 0.03232128912157792,
            "acc_norm": 0.5811965811965812,
            "acc_norm_stderr": 0.03232128912157792
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.33584905660377357,
            "acc_stderr": 0.029067220146644826,
            "acc_norm": 0.33584905660377357,
            "acc_norm_stderr": 0.029067220146644826
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.42727272727272725,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.42727272727272725,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.23703703703703705,
            "acc_stderr": 0.02592887613276611,
            "acc_norm": 0.23703703703703705,
            "acc_norm_stderr": 0.02592887613276611
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31125827814569534,
            "acc_stderr": 0.03780445850526733,
            "acc_norm": 0.31125827814569534,
            "acc_norm_stderr": 0.03780445850526733
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.43781094527363185,
            "acc_stderr": 0.0350808011219984,
            "acc_norm": 0.43781094527363185,
            "acc_norm_stderr": 0.0350808011219984
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.0339175032232166,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.0339175032232166
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.022418042891113935,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.022418042891113935
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932269,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932269
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3872832369942196,
            "acc_stderr": 0.026226158605124655,
            "acc_norm": 0.3872832369942196,
            "acc_norm_stderr": 0.026226158605124655
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3987730061349693,
            "acc_stderr": 0.03847021420456023,
            "acc_norm": 0.3987730061349693,
            "acc_norm_stderr": 0.03847021420456023
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.38271604938271603,
            "acc_stderr": 0.027044538138402616,
            "acc_norm": 0.38271604938271603,
            "acc_norm_stderr": 0.027044538138402616
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.41968911917098445,
            "acc_stderr": 0.035615873276858834,
            "acc_norm": 0.41968911917098445,
            "acc_norm_stderr": 0.035615873276858834
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3174311926605505,
            "acc_stderr": 0.0199571521984605,
            "acc_norm": 0.3174311926605505,
            "acc_norm_stderr": 0.0199571521984605
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.04163453031302859,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.04163453031302859
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4084967320261438,
            "acc_stderr": 0.028146405993096358,
            "acc_norm": 0.4084967320261438,
            "acc_norm_stderr": 0.028146405993096358
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5702479338842975,
            "acc_stderr": 0.04519082021319773,
            "acc_norm": 0.5702479338842975,
            "acc_norm_stderr": 0.04519082021319773
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23026315789473684,
            "acc_stderr": 0.03426059424403165,
            "acc_norm": 0.23026315789473684,
            "acc_norm_stderr": 0.03426059424403165
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.01877168389352817,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.01877168389352817
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3049645390070922,
            "acc_stderr": 0.02746470844202213,
            "acc_norm": 0.3049645390070922,
            "acc_norm_stderr": 0.02746470844202213
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.36607142857142855,
            "acc_stderr": 0.04572372358737431,
            "acc_norm": 0.36607142857142855,
            "acc_norm_stderr": 0.04572372358737431
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.029157522184605607,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.029157522184605607
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24916201117318434,
            "acc_stderr": 0.014465893829859933,
            "acc_norm": 0.24916201117318434,
            "acc_norm_stderr": 0.014465893829859933
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.21691176470588236,
            "acc_stderr": 0.025035845227711254,
            "acc_norm": 0.21691176470588236,
            "acc_norm_stderr": 0.025035845227711254
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4163265306122449,
            "acc_stderr": 0.03155782816556164,
            "acc_norm": 0.4163265306122449,
            "acc_norm_stderr": 0.03155782816556164
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3924050632911392,
            "acc_stderr": 0.03178471874564729,
            "acc_norm": 0.3924050632911392,
            "acc_norm_stderr": 0.03178471874564729
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3050847457627119,
            "acc_stderr": 0.011759939618085451,
            "acc_norm": 0.3050847457627119,
            "acc_norm_stderr": 0.011759939618085451
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.03374499356319355,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.03374499356319355
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4,
            "acc_stderr": 0.03825460278380026,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03825460278380026
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834562,
            "mc2": 0.44032476462099357,
            "mc2_stderr": 0.015871156864559203
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.29515938606847697,
            "acc_stderr": 0.015681535229192186,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.01661661284322494
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "KRAFTON/KORani-v3-13B",
        "model_sha": "d6479f9de126caf02a770e5e8db4524a0ccb4db7",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}