{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.39505119453924914,
            "acc_stderr": 0.014285898292938167,
            "acc_norm": 0.45307167235494883,
            "acc_norm_stderr": 0.014546892052005628
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.40450109539932283,
            "acc_stderr": 0.004897921845492103,
            "acc_norm": 0.5380402310296754,
            "acc_norm_stderr": 0.004975319435777095
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.40350877192982454,
            "acc_stderr": 0.03762738699917056,
            "acc_norm": 0.40350877192982454,
            "acc_norm_stderr": 0.03762738699917056
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2912621359223301,
            "acc_stderr": 0.044986763205729245,
            "acc_norm": 0.2912621359223301,
            "acc_norm_stderr": 0.044986763205729245
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.016857391247472552,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.016857391247472552
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2425531914893617,
            "acc_stderr": 0.028020226271200217,
            "acc_norm": 0.2425531914893617,
            "acc_norm_stderr": 0.028020226271200217
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.28313253012048195,
            "acc_stderr": 0.03507295431370518,
            "acc_norm": 0.28313253012048195,
            "acc_norm_stderr": 0.03507295431370518
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3762057877813505,
            "acc_stderr": 0.027513925683549427,
            "acc_norm": 0.3762057877813505,
            "acc_norm_stderr": 0.027513925683549427
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.336322869955157,
            "acc_stderr": 0.031708824268455,
            "acc_norm": 0.336322869955157,
            "acc_norm_stderr": 0.031708824268455
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3282442748091603,
            "acc_stderr": 0.041184385658062976,
            "acc_norm": 0.3282442748091603,
            "acc_norm_stderr": 0.041184385658062976
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2828282828282828,
            "acc_stderr": 0.03208779558786751,
            "acc_norm": 0.2828282828282828,
            "acc_norm_stderr": 0.03208779558786751
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2620689655172414,
            "acc_stderr": 0.036646663372252565,
            "acc_norm": 0.2620689655172414,
            "acc_norm_stderr": 0.036646663372252565
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.03873958714149352,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.03873958714149352
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.25630252100840334,
            "acc_stderr": 0.028359620870533953,
            "acc_norm": 0.25630252100840334,
            "acc_norm_stderr": 0.028359620870533953
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24615384615384617,
            "acc_stderr": 0.021840866990423088,
            "acc_norm": 0.24615384615384617,
            "acc_norm_stderr": 0.021840866990423088
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04668408033024931,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04668408033024931
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03255086769970103,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03255086769970103
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3387096774193548,
            "acc_stderr": 0.02692344605930286,
            "acc_norm": 0.3387096774193548,
            "acc_norm_stderr": 0.02692344605930286
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.03255326307272486,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.03255326307272486
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2490566037735849,
            "acc_stderr": 0.02661648298050172,
            "acc_norm": 0.2490566037735849,
            "acc_norm_stderr": 0.02661648298050172
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2818181818181818,
            "acc_stderr": 0.04309118709946458,
            "acc_norm": 0.2818181818181818,
            "acc_norm_stderr": 0.04309118709946458
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.026962424325073838,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.026962424325073838
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.46766169154228854,
            "acc_stderr": 0.035281314729336065,
            "acc_norm": 0.46766169154228854,
            "acc_norm_stderr": 0.035281314729336065
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2832369942196532,
            "acc_stderr": 0.034355680560478746,
            "acc_norm": 0.2832369942196532,
            "acc_norm_stderr": 0.034355680560478746
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.02326651221373057,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.02326651221373057
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036624,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036624
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3901734104046243,
            "acc_stderr": 0.026261677607806636,
            "acc_norm": 0.3901734104046243,
            "acc_norm_stderr": 0.026261677607806636
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3496932515337423,
            "acc_stderr": 0.037466683254700206,
            "acc_norm": 0.3496932515337423,
            "acc_norm_stderr": 0.037466683254700206
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.02640614597362566,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.02640614597362566
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.32124352331606215,
            "acc_stderr": 0.033699508685490674,
            "acc_norm": 0.32124352331606215,
            "acc_norm_stderr": 0.033699508685490674
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.30825688073394497,
            "acc_stderr": 0.019798366698367268,
            "acc_norm": 0.30825688073394497,
            "acc_norm_stderr": 0.019798366698367268
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03670066451047181,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03670066451047181
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3202614379084967,
            "acc_stderr": 0.026716118380156827,
            "acc_norm": 0.3202614379084967,
            "acc_norm_stderr": 0.026716118380156827
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5702479338842975,
            "acc_stderr": 0.04519082021319771,
            "acc_norm": 0.5702479338842975,
            "acc_norm_stderr": 0.04519082021319771
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3092105263157895,
            "acc_stderr": 0.03761070869867479,
            "acc_norm": 0.3092105263157895,
            "acc_norm_stderr": 0.03761070869867479
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.31862745098039214,
            "acc_stderr": 0.01885008469646872,
            "acc_norm": 0.31862745098039214,
            "acc_norm_stderr": 0.01885008469646872
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.026684564340461,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.026684564340461
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.19642857142857142,
            "acc_stderr": 0.037709700493470194,
            "acc_norm": 0.19642857142857142,
            "acc_norm_stderr": 0.037709700493470194
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24537037037037038,
            "acc_stderr": 0.029346665094372948,
            "acc_norm": 0.24537037037037038,
            "acc_norm_stderr": 0.029346665094372948
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2324022346368715,
            "acc_stderr": 0.014125968754673385,
            "acc_norm": 0.2324022346368715,
            "acc_norm_stderr": 0.014125968754673385
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.20955882352941177,
            "acc_stderr": 0.02472311040767705,
            "acc_norm": 0.20955882352941177,
            "acc_norm_stderr": 0.02472311040767705
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2653061224489796,
            "acc_stderr": 0.028263889943784617,
            "acc_norm": 0.2653061224489796,
            "acc_norm_stderr": 0.028263889943784617
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4472573839662447,
            "acc_stderr": 0.03236564251614192,
            "acc_norm": 0.4472573839662447,
            "acc_norm_stderr": 0.03236564251614192
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.30182529335071706,
            "acc_stderr": 0.01172435051810589,
            "acc_norm": 0.30182529335071706,
            "acc_norm_stderr": 0.01172435051810589
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.03374499356319354,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.03374499356319354
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4,
            "acc_stderr": 0.03825460278380026,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03825460278380026
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3072215422276622,
            "mc1_stderr": 0.016150201321323002,
            "mc2": 0.47008540499028884,
            "mc2_stderr": 0.015171096468571796
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5903755868544601,
            "acc_stderr": 0.016857467505356098,
            "acc_norm": 0.6842723004694836,
            "acc_norm_stderr": 0.01593331134555564
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12",
        "model_sha": "b70b4cab0f97d64c9540240ae4cdbec4afbf7206",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}