{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3720136518771331,
            "acc_stderr": 0.014124597881844466,
            "acc_norm": 0.4402730375426621,
            "acc_norm_stderr": 0.014506769524804243
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.40938060147381,
            "acc_stderr": 0.004907146229347555,
            "acc_norm": 0.5426209918342959,
            "acc_norm_stderr": 0.004971619995879755
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49707602339181284,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.49707602339181284,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.3592233009708738,
            "acc_stderr": 0.047504583990416925,
            "acc_norm": 0.3592233009708738,
            "acc_norm_stderr": 0.047504583990416925
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4891443167305236,
            "acc_stderr": 0.017875748840242418,
            "acc_norm": 0.4891443167305236,
            "acc_norm_stderr": 0.017875748840242418
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.041539484047424,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.041539484047424
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.34893617021276596,
            "acc_stderr": 0.031158522131357783,
            "acc_norm": 0.34893617021276596,
            "acc_norm_stderr": 0.031158522131357783
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.35542168674698793,
            "acc_stderr": 0.03726214354322415,
            "acc_norm": 0.35542168674698793,
            "acc_norm_stderr": 0.03726214354322415
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.40836012861736337,
            "acc_stderr": 0.027917050748484634,
            "acc_norm": 0.40836012861736337,
            "acc_norm_stderr": 0.027917050748484634
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4170403587443946,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.4170403587443946,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.44274809160305345,
            "acc_stderr": 0.0435644720266507,
            "acc_norm": 0.44274809160305345,
            "acc_norm_stderr": 0.0435644720266507
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.43,
            "acc_stderr": 0.0497569851956243,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.0497569851956243
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4292929292929293,
            "acc_stderr": 0.035265527246011986,
            "acc_norm": 0.4292929292929293,
            "acc_norm_stderr": 0.035265527246011986
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.22758620689655173,
            "acc_stderr": 0.03493950380131184,
            "acc_norm": 0.22758620689655173,
            "acc_norm_stderr": 0.03493950380131184
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171453,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171453
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31092436974789917,
            "acc_stderr": 0.030066761582977924,
            "acc_norm": 0.31092436974789917,
            "acc_norm_stderr": 0.030066761582977924
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3564102564102564,
            "acc_stderr": 0.024283140529467284,
            "acc_norm": 0.3564102564102564,
            "acc_norm_stderr": 0.024283140529467284
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.31527093596059114,
            "acc_stderr": 0.03269080871970186,
            "acc_norm": 0.31527093596059114,
            "acc_norm_stderr": 0.03269080871970186
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3774193548387097,
            "acc_stderr": 0.027575960723278243,
            "acc_norm": 0.3774193548387097,
            "acc_norm_stderr": 0.027575960723278243
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5982905982905983,
            "acc_stderr": 0.03211693751051622,
            "acc_norm": 0.5982905982905983,
            "acc_norm_stderr": 0.03211693751051622
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3433962264150943,
            "acc_stderr": 0.029224526469124792,
            "acc_norm": 0.3433962264150943,
            "acc_norm_stderr": 0.029224526469124792
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.027195934804085626,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.027195934804085626
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2052980132450331,
            "acc_stderr": 0.03297986648473836,
            "acc_norm": 0.2052980132450331,
            "acc_norm_stderr": 0.03297986648473836
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.48258706467661694,
            "acc_stderr": 0.03533389234739244,
            "acc_norm": 0.48258706467661694,
            "acc_norm_stderr": 0.03533389234739244
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.32947976878612717,
            "acc_stderr": 0.03583901754736411,
            "acc_norm": 0.32947976878612717,
            "acc_norm_stderr": 0.03583901754736411
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.02141168439369419,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.02141168439369419
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.375,
            "acc_stderr": 0.04048439222695598,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04048439222695598
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4046242774566474,
            "acc_stderr": 0.026424816594009845,
            "acc_norm": 0.4046242774566474,
            "acc_norm_stderr": 0.026424816594009845
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44785276073619634,
            "acc_stderr": 0.03906947479456601,
            "acc_norm": 0.44785276073619634,
            "acc_norm_stderr": 0.03906947479456601
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4104938271604938,
            "acc_stderr": 0.027371350925124764,
            "acc_norm": 0.4104938271604938,
            "acc_norm_stderr": 0.027371350925124764
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.16,
            "acc_stderr": 0.0368452949177471,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.0368452949177471
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.41968911917098445,
            "acc_stderr": 0.03561587327685883,
            "acc_norm": 0.41968911917098445,
            "acc_norm_stderr": 0.03561587327685883
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3669724770642202,
            "acc_stderr": 0.020664675659520532,
            "acc_norm": 0.3669724770642202,
            "acc_norm_stderr": 0.020664675659520532
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23015873015873015,
            "acc_stderr": 0.037649508797906066,
            "acc_norm": 0.23015873015873015,
            "acc_norm_stderr": 0.037649508797906066
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3562091503267974,
            "acc_stderr": 0.02742047766262923,
            "acc_norm": 0.3562091503267974,
            "acc_norm_stderr": 0.02742047766262923
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6115702479338843,
            "acc_stderr": 0.04449270350068383,
            "acc_norm": 0.6115702479338843,
            "acc_norm_stderr": 0.04449270350068383
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.03988903703336285,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.03988903703336285
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3480392156862745,
            "acc_stderr": 0.019270998708223977,
            "acc_norm": 0.3480392156862745,
            "acc_norm_stderr": 0.019270998708223977
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307854,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307854
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.027696910713093936,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.027696910713093936
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.01435591196476786,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.01435591196476786
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.23161764705882354,
            "acc_stderr": 0.025626533803777565,
            "acc_norm": 0.23161764705882354,
            "acc_norm_stderr": 0.025626533803777565
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.31020408163265306,
            "acc_stderr": 0.029613459872484385,
            "acc_norm": 0.31020408163265306,
            "acc_norm_stderr": 0.029613459872484385
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.46835443037974683,
            "acc_stderr": 0.03248197400511075,
            "acc_norm": 0.46835443037974683,
            "acc_norm_stderr": 0.03248197400511075
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28226857887874834,
            "acc_stderr": 0.011495852176241954,
            "acc_norm": 0.28226857887874834,
            "acc_norm_stderr": 0.011495852176241954
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.44607843137254904,
            "acc_stderr": 0.03488845451304974,
            "acc_norm": 0.44607843137254904,
            "acc_norm_stderr": 0.03488845451304974
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4727272727272727,
            "acc_stderr": 0.03898531605579419,
            "acc_norm": 0.4727272727272727,
            "acc_norm_stderr": 0.03898531605579419
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29498164014687883,
            "mc1_stderr": 0.015964400965589678,
            "mc2": 0.4528465622549083,
            "mc2_stderr": 0.015125783674090152
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3707201889020071,
            "acc_stderr": 0.01660580128921261,
            "acc_norm": 0.512396694214876,
            "acc_norm_stderr": 0.017185069732676538
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4",
        "model_sha": "26ef51b65661f5762efa36aadf56a7c3820e6762",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}