|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.40784982935153585,
            "acc_stderr": 0.014361097288449708,
            "acc_norm": 0.4778156996587031,
            "acc_norm_stderr": 0.014597001927076133
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4287990440151364,
            "acc_stderr": 0.004938930143234453,
            "acc_norm": 0.574088826926907,
            "acc_norm_stderr": 0.004934698012050241
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5146198830409356,
            "acc_stderr": 0.038331852752130254,
            "acc_norm": 0.5146198830409356,
            "acc_norm_stderr": 0.038331852752130254
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5632183908045977,
            "acc_stderr": 0.017736470837800694,
            "acc_norm": 0.5632183908045977,
            "acc_norm_stderr": 0.017736470837800694
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4148148148148148,
            "acc_stderr": 0.04256193767901407,
            "acc_norm": 0.4148148148148148,
            "acc_norm_stderr": 0.04256193767901407
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.425531914893617,
            "acc_stderr": 0.03232146916224469,
            "acc_norm": 0.425531914893617,
            "acc_norm_stderr": 0.03232146916224469
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.03836722176598053,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.03836722176598053
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5016077170418006,
            "acc_stderr": 0.02839794490780661,
            "acc_norm": 0.5016077170418006,
            "acc_norm_stderr": 0.02839794490780661
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5291479820627802,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.5291479820627802,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411021,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411021
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6060606060606061,
            "acc_stderr": 0.034812853382329645,
            "acc_norm": 0.6060606060606061,
            "acc_norm_stderr": 0.034812853382329645
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3931034482758621,
            "acc_stderr": 0.040703290137070705,
            "acc_norm": 0.3931034482758621,
            "acc_norm_stderr": 0.040703290137070705
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4641025641025641,
            "acc_stderr": 0.025285585990017834,
            "acc_norm": 0.4641025641025641,
            "acc_norm_stderr": 0.025285585990017834
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.03438157967036545,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.03438157967036545
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.46774193548387094,
            "acc_stderr": 0.02838474778881333,
            "acc_norm": 0.46774193548387094,
            "acc_norm_stderr": 0.02838474778881333
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6495726495726496,
            "acc_stderr": 0.0312561082442188,
            "acc_norm": 0.6495726495726496,
            "acc_norm_stderr": 0.0312561082442188
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4377358490566038,
            "acc_stderr": 0.030533338430467516,
            "acc_norm": 0.4377358490566038,
            "acc_norm_stderr": 0.030533338430467516
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.04709306978661895,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.04709306978661895
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6069651741293532,
            "acc_stderr": 0.0345368246603156,
            "acc_norm": 0.6069651741293532,
            "acc_norm_stderr": 0.0345368246603156
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.037585177754049466,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.037585177754049466
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.023456037383982026,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.023456037383982026
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842425,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842425
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5,
            "acc_stderr": 0.026919095102908273,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.026919095102908273
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.027801656212323667,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.027801656212323667
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5854922279792746,
            "acc_stderr": 0.035553003195576686,
            "acc_norm": 0.5854922279792746,
            "acc_norm_stderr": 0.035553003195576686
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.0409698513984367,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.0409698513984367
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.618348623853211,
            "acc_stderr": 0.02082814851702261,
            "acc_norm": 0.618348623853211,
            "acc_norm_stderr": 0.02082814851702261
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.42483660130718953,
            "acc_stderr": 0.02830457667314111,
            "acc_norm": 0.42483660130718953,
            "acc_norm_stderr": 0.02830457667314111
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488584,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488584
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.044120158066245044,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.044120158066245044
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.03925523381052932,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.03925523381052932
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.019794488900024106,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.019794488900024106
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3475177304964539,
            "acc_stderr": 0.028406627809590954,
            "acc_norm": 0.3475177304964539,
            "acc_norm_stderr": 0.028406627809590954
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.039523019677025116,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.039523019677025116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.35648148148148145,
            "acc_stderr": 0.032664783315272714,
            "acc_norm": 0.35648148148148145,
            "acc_norm_stderr": 0.032664783315272714
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4264705882352941,
            "acc_stderr": 0.030042615832714854,
            "acc_norm": 0.4264705882352941,
            "acc_norm_stderr": 0.030042615832714854
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.49795918367346936,
            "acc_stderr": 0.0320089533497105,
            "acc_norm": 0.49795918367346936,
            "acc_norm_stderr": 0.0320089533497105
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6413502109704642,
            "acc_stderr": 0.031219569445301854,
            "acc_norm": 0.6413502109704642,
            "acc_norm_stderr": 0.031219569445301854
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35853976531942633,
            "acc_stderr": 0.012248487319682751,
            "acc_norm": 0.35853976531942633,
            "acc_norm_stderr": 0.012248487319682751
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606786,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606786
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.03902551007374448,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.03902551007374448
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3108935128518972,
            "mc1_stderr": 0.016203316673559696,
            "mc2": 0.4712352722064192,
            "mc2_stderr": 0.015376328355595536
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.47461629279811096,
            "acc_stderr": 0.017168187201429253,
            "acc_norm": 0.5466351829988194,
            "acc_norm_stderr": 0.01711541822522687
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DopeorNope/COKALD-13B-v2",
        "model_sha": "3e6e8e2882890e69078d236891f9212a5b9d7a50",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}