{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.2551194539249147,
      "acc_stderr": 0.012739038695202105,
      "acc_norm": 0.3191126279863481,
      "acc_norm_stderr": 0.013621696119173306
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.32812188807010556,
      "acc_stderr": 0.0046856987521048075,
      "acc_norm": 0.39225253933479387,
      "acc_norm_stderr": 0.004872546302641858
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.40350877192982454,
      "acc_stderr": 0.03762738699917055,
      "acc_norm": 0.40350877192982454,
      "acc_norm_stderr": 0.03762738699917055
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.3300970873786408,
      "acc_stderr": 0.0465614711001235,
      "acc_norm": 0.3300970873786408,
      "acc_norm_stderr": 0.0465614711001235
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.37037037037037035,
      "acc_stderr": 0.017268607560005773,
      "acc_norm": 0.37037037037037035,
      "acc_norm_stderr": 0.017268607560005773
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.2518518518518518,
      "acc_stderr": 0.03749850709174021,
      "acc_norm": 0.2518518518518518,
      "acc_norm_stderr": 0.03749850709174021
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.32340425531914896,
      "acc_stderr": 0.030579442773610334,
      "acc_norm": 0.32340425531914896,
      "acc_norm_stderr": 0.030579442773610334
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.3493975903614458,
      "acc_stderr": 0.03711725190740749,
      "acc_norm": 0.3493975903614458,
      "acc_norm_stderr": 0.03711725190740749
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.3408360128617363,
      "acc_stderr": 0.026920841260776165,
      "acc_norm": 0.3408360128617363,
      "acc_norm_stderr": 0.026920841260776165
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.38565022421524664,
      "acc_stderr": 0.03266842214289201,
      "acc_norm": 0.38565022421524664,
      "acc_norm_stderr": 0.03266842214289201
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.3816793893129771,
      "acc_stderr": 0.0426073515764456,
      "acc_norm": 0.3816793893129771,
      "acc_norm_stderr": 0.0426073515764456
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695236,
      "acc_norm": 0.34,
      "acc_norm_stderr": 0.04760952285695236
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.35858585858585856,
      "acc_stderr": 0.03416903640391521,
      "acc_norm": 0.35858585858585856,
      "acc_norm_stderr": 0.03416903640391521
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.3793103448275862,
      "acc_stderr": 0.04043461861916747,
      "acc_norm": 0.3793103448275862,
      "acc_norm_stderr": 0.04043461861916747
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.17647058823529413,
      "acc_stderr": 0.03793281185307809,
      "acc_norm": 0.17647058823529413,
      "acc_norm_stderr": 0.03793281185307809
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.3067226890756303,
      "acc_stderr": 0.029953823891887048,
      "acc_norm": 0.3067226890756303,
      "acc_norm_stderr": 0.029953823891887048
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.2948717948717949,
      "acc_stderr": 0.023119362758232294,
      "acc_norm": 0.2948717948717949,
      "acc_norm_stderr": 0.023119362758232294
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.39,
      "acc_stderr": 0.04902071300001974,
      "acc_norm": 0.39,
      "acc_norm_stderr": 0.04902071300001974
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.35185185185185186,
      "acc_stderr": 0.04616631111801714,
      "acc_norm": 0.35185185185185186,
      "acc_norm_stderr": 0.04616631111801714
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.2512315270935961,
      "acc_stderr": 0.030516530732694436,
      "acc_norm": 0.2512315270935961,
      "acc_norm_stderr": 0.030516530732694436
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.3580645161290323,
      "acc_stderr": 0.027273890594300642,
      "acc_norm": 0.3580645161290323,
      "acc_norm_stderr": 0.027273890594300642
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.49145299145299143,
      "acc_stderr": 0.032751303000970296,
      "acc_norm": 0.49145299145299143,
      "acc_norm_stderr": 0.032751303000970296
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.3471698113207547,
      "acc_stderr": 0.029300101705549655,
      "acc_norm": 0.3471698113207547,
      "acc_norm_stderr": 0.029300101705549655
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.4090909090909091,
      "acc_stderr": 0.047093069786618966,
      "acc_norm": 0.4090909090909091,
      "acc_norm_stderr": 0.047093069786618966
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.25925925925925924,
      "acc_stderr": 0.02671924078371216,
      "acc_norm": 0.25925925925925924,
      "acc_norm_stderr": 0.02671924078371216
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.2185430463576159,
      "acc_stderr": 0.03374235550425694,
      "acc_norm": 0.2185430463576159,
      "acc_norm_stderr": 0.03374235550425694
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.40298507462686567,
      "acc_stderr": 0.034683432951111266,
      "acc_norm": 0.40298507462686567,
      "acc_norm_stderr": 0.034683432951111266
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.2832369942196532,
      "acc_stderr": 0.03435568056047873,
      "acc_norm": 0.2832369942196532,
      "acc_norm_stderr": 0.03435568056047873
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.24867724867724866,
      "acc_stderr": 0.02226181769240016,
      "acc_norm": 0.24867724867724866,
      "acc_norm_stderr": 0.02226181769240016
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.2638888888888889,
      "acc_stderr": 0.03685651095897532,
      "acc_norm": 0.2638888888888889,
      "acc_norm_stderr": 0.03685651095897532
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.25,
      "acc_stderr": 0.04351941398892446,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.04351941398892446
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.49,
      "acc_stderr": 0.05024183937956913,
      "acc_norm": 0.49,
      "acc_norm_stderr": 0.05024183937956913
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.3468208092485549,
      "acc_stderr": 0.025624723994030457,
      "acc_norm": 0.3468208092485549,
      "acc_norm_stderr": 0.025624723994030457
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.3619631901840491,
      "acc_stderr": 0.037757007291414416,
      "acc_norm": 0.3619631901840491,
      "acc_norm_stderr": 0.037757007291414416
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.37962962962962965,
      "acc_stderr": 0.02700252103451647,
      "acc_norm": 0.37962962962962965,
      "acc_norm_stderr": 0.02700252103451647
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.36,
      "acc_stderr": 0.04824181513244218,
      "acc_norm": 0.36,
      "acc_norm_stderr": 0.04824181513244218
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.33678756476683935,
      "acc_stderr": 0.034107802518361846,
      "acc_norm": 0.33678756476683935,
      "acc_norm_stderr": 0.034107802518361846
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.22807017543859648,
      "acc_stderr": 0.03947152782669415,
      "acc_norm": 0.22807017543859648,
      "acc_norm_stderr": 0.03947152782669415
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.3155963302752294,
      "acc_stderr": 0.019926117513869662,
      "acc_norm": 0.3155963302752294,
      "acc_norm_stderr": 0.019926117513869662
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.2222222222222222,
      "acc_stderr": 0.037184890068181146,
      "acc_norm": 0.2222222222222222,
      "acc_norm_stderr": 0.037184890068181146
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.30392156862745096,
      "acc_stderr": 0.026336613469046637,
      "acc_norm": 0.30392156862745096,
      "acc_norm_stderr": 0.026336613469046637
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.42,
      "acc_stderr": 0.049604496374885836,
      "acc_norm": 0.42,
      "acc_norm_stderr": 0.049604496374885836
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.5537190082644629,
      "acc_stderr": 0.0453793517794788,
      "acc_norm": 0.5537190082644629,
      "acc_norm_stderr": 0.0453793517794788
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.2894736842105263,
      "acc_stderr": 0.036906779861372814,
      "acc_norm": 0.2894736842105263,
      "acc_norm_stderr": 0.036906779861372814
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.3006535947712418,
      "acc_stderr": 0.018550634502952964,
      "acc_norm": 0.3006535947712418,
      "acc_norm_stderr": 0.018550634502952964
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.29432624113475175,
      "acc_stderr": 0.02718712701150379,
      "acc_norm": 0.29432624113475175,
      "acc_norm_stderr": 0.02718712701150379
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.2767857142857143,
      "acc_stderr": 0.04246624336697624,
      "acc_norm": 0.2767857142857143,
      "acc_norm_stderr": 0.04246624336697624
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.2175925925925926,
      "acc_stderr": 0.02813968944485967,
      "acc_norm": 0.2175925925925926,
      "acc_norm_stderr": 0.02813968944485967
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.2424581005586592,
      "acc_stderr": 0.01433352205921789,
      "acc_norm": 0.2424581005586592,
      "acc_norm_stderr": 0.01433352205921789
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.29,
      "acc_stderr": 0.045604802157206845,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.045604802157206845
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.35,
      "acc_stderr": 0.047937248544110196,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.047937248544110196
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.24632352941176472,
      "acc_stderr": 0.02617343857052,
      "acc_norm": 0.24632352941176472,
      "acc_norm_stderr": 0.02617343857052
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.3020408163265306,
      "acc_stderr": 0.02939360931987982,
      "acc_norm": 0.3020408163265306,
      "acc_norm_stderr": 0.02939360931987982
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.3628691983122363,
      "acc_stderr": 0.03129920825530213,
      "acc_norm": 0.3628691983122363,
      "acc_norm_stderr": 0.03129920825530213
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.2646675358539765,
      "acc_stderr": 0.011267332992845535,
      "acc_norm": 0.2646675358539765,
      "acc_norm_stderr": 0.011267332992845535
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.27941176470588236,
      "acc_stderr": 0.03149328104507956,
      "acc_norm": 0.27941176470588236,
      "acc_norm_stderr": 0.03149328104507956
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.23636363636363636,
      "acc_stderr": 0.033175059300091805,
      "acc_norm": 0.23636363636363636,
      "acc_norm_stderr": 0.033175059300091805
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.2729498164014688,
      "mc1_stderr": 0.015594753632006509,
      "mc2": 0.44330415731488865,
      "mc2_stderr": 0.015557823529945149
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.2585596221959858,
      "acc_stderr": 0.015053354438963988,
      "acc_norm": 0.3482880755608028,
      "acc_norm_stderr": 0.01637992673914804
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "heegyu/WizardVicuna2-13b-hf",
    "model_sha": "6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}