{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.371160409556314,
            "acc_stderr": 0.014117971901142818,
            "acc_norm": 0.42662116040955633,
            "acc_norm_stderr": 0.014453185592920293
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4075881298546106,
            "acc_stderr": 0.004903815885983279,
            "acc_norm": 0.5435172276438957,
            "acc_norm_stderr": 0.004970846697552308
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5146198830409356,
            "acc_stderr": 0.03833185275213026,
            "acc_norm": 0.5146198830409356,
            "acc_norm_stderr": 0.03833185275213026
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.44660194174757284,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.44660194174757284,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.49936143039591313,
            "acc_stderr": 0.017879948914431662,
            "acc_norm": 0.49936143039591313,
            "acc_norm_stderr": 0.017879948914431662
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04072314811876837,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04072314811876837
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3829787234042553,
            "acc_stderr": 0.03177821250236922,
            "acc_norm": 0.3829787234042553,
            "acc_norm_stderr": 0.03177821250236922
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.0382840111507902,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.0382840111507902
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4887459807073955,
            "acc_stderr": 0.028390897396863533,
            "acc_norm": 0.4887459807073955,
            "acc_norm_stderr": 0.028390897396863533
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4977578475336323,
            "acc_stderr": 0.033557465352232634,
            "acc_norm": 0.4977578475336323,
            "acc_norm_stderr": 0.033557465352232634
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4797979797979798,
            "acc_stderr": 0.03559443565563919,
            "acc_norm": 0.4797979797979798,
            "acc_norm_stderr": 0.03559443565563919
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.33793103448275863,
            "acc_stderr": 0.03941707632064889,
            "acc_norm": 0.33793103448275863,
            "acc_norm_stderr": 0.03941707632064889
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.40336134453781514,
            "acc_stderr": 0.031866081214088314,
            "acc_norm": 0.40336134453781514,
            "acc_norm_stderr": 0.031866081214088314
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4076923076923077,
            "acc_stderr": 0.02491524398598784,
            "acc_norm": 0.4076923076923077,
            "acc_norm_stderr": 0.02491524398598784
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.04792898170907061,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.04792898170907061
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4161290322580645,
            "acc_stderr": 0.02804098138076155,
            "acc_norm": 0.4161290322580645,
            "acc_norm_stderr": 0.02804098138076155
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6837606837606838,
            "acc_stderr": 0.030463656747340254,
            "acc_norm": 0.6837606837606838,
            "acc_norm_stderr": 0.030463656747340254
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3886792452830189,
            "acc_stderr": 0.030000485448675986,
            "acc_norm": 0.3886792452830189,
            "acc_norm_stderr": 0.030000485448675986
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.028037929969114986,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.028037929969114986
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5422885572139303,
            "acc_stderr": 0.03522865864099598,
            "acc_norm": 0.5422885572139303,
            "acc_norm_stderr": 0.03522865864099598
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3583815028901734,
            "acc_stderr": 0.03656343653353159,
            "acc_norm": 0.3583815028901734,
            "acc_norm_stderr": 0.03656343653353159
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.02345603738398202,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.02345603738398202
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.040166600304512336,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.040166600304512336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.44508670520231214,
            "acc_stderr": 0.026756255129663762,
            "acc_norm": 0.44508670520231214,
            "acc_norm_stderr": 0.026756255129663762
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4110429447852761,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.4110429447852761,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.43209876543209874,
            "acc_stderr": 0.02756301097160667,
            "acc_norm": 0.43209876543209874,
            "acc_norm_stderr": 0.02756301097160667
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5077720207253886,
            "acc_stderr": 0.03608003225569653,
            "acc_norm": 0.5077720207253886,
            "acc_norm_stderr": 0.03608003225569653
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.04227054451232199,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.04227054451232199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5082568807339449,
            "acc_stderr": 0.021434399918214338,
            "acc_norm": 0.5082568807339449,
            "acc_norm_stderr": 0.021434399918214338
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.04190596438871137,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.04190596438871137
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.434640522875817,
            "acc_stderr": 0.02838425670488304,
            "acc_norm": 0.434640522875817,
            "acc_norm_stderr": 0.02838425670488304
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3562091503267974,
            "acc_stderr": 0.0193733324207245,
            "acc_norm": 0.3562091503267974,
            "acc_norm_stderr": 0.0193733324207245
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3191489361702128,
            "acc_stderr": 0.027807990141320186,
            "acc_norm": 0.3191489361702128,
            "acc_norm_stderr": 0.027807990141320186
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3125,
            "acc_stderr": 0.043994650575715215,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.043994650575715215
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3287037037037037,
            "acc_stderr": 0.03203614084670058,
            "acc_norm": 0.3287037037037037,
            "acc_norm_stderr": 0.03203614084670058
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3014705882352941,
            "acc_stderr": 0.027875982114273168,
            "acc_norm": 0.3014705882352941,
            "acc_norm_stderr": 0.027875982114273168
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37551020408163266,
            "acc_stderr": 0.031001209039894843,
            "acc_norm": 0.37551020408163266,
            "acc_norm_stderr": 0.031001209039894843
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5527426160337553,
            "acc_stderr": 0.03236564251614192,
            "acc_norm": 0.5527426160337553,
            "acc_norm_stderr": 0.03236564251614192
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31421121251629724,
            "acc_stderr": 0.011855911587048231,
            "acc_norm": 0.31421121251629724,
            "acc_norm_stderr": 0.011855911587048231
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4852941176470588,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.4852941176470588,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.0390369864774844,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.0390369864774844
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2484700122399021,
            "mc1_stderr": 0.0151274270965207,
            "mc2": 0.40213800667232835,
            "mc2_stderr": 0.014730084928202228
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3192488262910798,
            "acc_stderr": 0.015980636535168225,
            "acc_norm": 0.44366197183098594,
            "acc_norm_stderr": 0.017030629301613084
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B",
        "model_sha": "fb333611b94ee15e4bb43e2535da14f147f760dc",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}