{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2858361774744027,
            "acc_stderr": 0.013203196088537364,
            "acc_norm": 0.33532423208191126,
            "acc_norm_stderr": 0.013796182947785564
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3910575582553276,
            "acc_stderr": 0.00486989929773455,
            "acc_norm": 0.5030870344552878,
            "acc_norm_stderr": 0.004989686307484551
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.26900584795321636,
            "acc_stderr": 0.03401052620104088,
            "acc_norm": 0.26900584795321636,
            "acc_norm_stderr": 0.03401052620104088
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1941747572815534,
            "acc_stderr": 0.03916667762822584,
            "acc_norm": 0.1941747572815534,
            "acc_norm_stderr": 0.03916667762822584
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.23371647509578544,
            "acc_stderr": 0.015133383278988832,
            "acc_norm": 0.23371647509578544,
            "acc_norm_stderr": 0.015133383278988832
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.039725528847851375,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.039725528847851375
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384739,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384739
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.24680851063829787,
            "acc_stderr": 0.02818544130123409,
            "acc_norm": 0.24680851063829787,
            "acc_norm_stderr": 0.02818544130123409
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.21686746987951808,
            "acc_stderr": 0.03208284450356365,
            "acc_norm": 0.21686746987951808,
            "acc_norm_stderr": 0.03208284450356365
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3215434083601286,
            "acc_stderr": 0.026527724079528872,
            "acc_norm": 0.3215434083601286,
            "acc_norm_stderr": 0.026527724079528872
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.15695067264573992,
            "acc_stderr": 0.024413587174907405,
            "acc_norm": 0.15695067264573992,
            "acc_norm_stderr": 0.024413587174907405
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.24427480916030533,
            "acc_stderr": 0.037683359597287434,
            "acc_norm": 0.24427480916030533,
            "acc_norm_stderr": 0.037683359597287434
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29797979797979796,
            "acc_stderr": 0.03258630383836555,
            "acc_norm": 0.29797979797979796,
            "acc_norm_stderr": 0.03258630383836555
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2896551724137931,
            "acc_stderr": 0.037800192304380156,
            "acc_norm": 0.2896551724137931,
            "acc_norm_stderr": 0.037800192304380156
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.21008403361344538,
            "acc_stderr": 0.026461398717471874,
            "acc_norm": 0.21008403361344538,
            "acc_norm_stderr": 0.026461398717471874
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24102564102564103,
            "acc_stderr": 0.021685546665333188,
            "acc_norm": 0.24102564102564103,
            "acc_norm_stderr": 0.021685546665333188
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.18,
            "acc_stderr": 0.03861229196653695,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.03861229196653695
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.04133119440243839,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.04133119440243839
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.270935960591133,
            "acc_stderr": 0.03127090713297698,
            "acc_norm": 0.270935960591133,
            "acc_norm_stderr": 0.03127090713297698
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.25161290322580643,
            "acc_stderr": 0.024685979286239973,
            "acc_norm": 0.25161290322580643,
            "acc_norm_stderr": 0.024685979286239973
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.24786324786324787,
            "acc_stderr": 0.028286324075564393,
            "acc_norm": 0.24786324786324787,
            "acc_norm_stderr": 0.028286324075564393
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2528301886792453,
            "acc_stderr": 0.02674989977124124,
            "acc_norm": 0.2528301886792453,
            "acc_norm_stderr": 0.02674989977124124
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.038950910157241364,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.038950910157241364
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.02659393910184408,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.02659393910184408
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969653,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969653
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23383084577114427,
            "acc_stderr": 0.029929415408348384,
            "acc_norm": 0.23383084577114427,
            "acc_norm_stderr": 0.029929415408348384
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2832369942196532,
            "acc_stderr": 0.03435568056047873,
            "acc_norm": 0.2832369942196532,
            "acc_norm_stderr": 0.03435568056047873
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2275132275132275,
            "acc_stderr": 0.021591269407823778,
            "acc_norm": 0.2275132275132275,
            "acc_norm_stderr": 0.021591269407823778
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816508,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816508
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23410404624277456,
            "acc_stderr": 0.022797110278071145,
            "acc_norm": 0.23410404624277456,
            "acc_norm_stderr": 0.022797110278071145
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3006134969325153,
            "acc_stderr": 0.03602511318806771,
            "acc_norm": 0.3006134969325153,
            "acc_norm_stderr": 0.03602511318806771
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.02584224870090218,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.02584224870090218
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.23316062176165803,
            "acc_stderr": 0.03051611137147601,
            "acc_norm": 0.23316062176165803,
            "acc_norm_stderr": 0.03051611137147601
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.26788990825688075,
            "acc_stderr": 0.018987462257978652,
            "acc_norm": 0.26788990825688075,
            "acc_norm_stderr": 0.018987462257978652
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03718489006818115,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03718489006818115
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.023805186524888142,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.023805186524888142
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2975206611570248,
            "acc_stderr": 0.04173349148083499,
            "acc_norm": 0.2975206611570248,
            "acc_norm_stderr": 0.04173349148083499
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.19736842105263158,
            "acc_stderr": 0.03238981601699397,
            "acc_norm": 0.19736842105263158,
            "acc_norm_stderr": 0.03238981601699397
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.017630827375148383,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.017630827375148383
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24113475177304963,
            "acc_stderr": 0.025518731049537766,
            "acc_norm": 0.24113475177304963,
            "acc_norm_stderr": 0.025518731049537766
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.03952301967702511,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.03952301967702511
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.38425925925925924,
            "acc_stderr": 0.03317354514310742,
            "acc_norm": 0.38425925925925924,
            "acc_norm_stderr": 0.03317354514310742
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27150837988826815,
            "acc_stderr": 0.014874252168095278,
            "acc_norm": 0.27150837988826815,
            "acc_norm_stderr": 0.014874252168095278
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40441176470588236,
            "acc_stderr": 0.02981263070156974,
            "acc_norm": 0.40441176470588236,
            "acc_norm_stderr": 0.02981263070156974
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2612244897959184,
            "acc_stderr": 0.02812342933514278,
            "acc_norm": 0.2612244897959184,
            "acc_norm_stderr": 0.02812342933514278
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2616033755274262,
            "acc_stderr": 0.028609516716994934,
            "acc_norm": 0.2616033755274262,
            "acc_norm_stderr": 0.028609516716994934
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2692307692307692,
            "acc_stderr": 0.011328734403140332,
            "acc_norm": 0.2692307692307692,
            "acc_norm_stderr": 0.011328734403140332
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.031493281045079556,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.031493281045079556
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.23636363636363636,
            "acc_stderr": 0.03317505930009179,
            "acc_norm": 0.23636363636363636,
            "acc_norm_stderr": 0.03317505930009179
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2802937576499388,
            "mc1_stderr": 0.015723139524608753,
            "mc2": 0.42256277632208605,
            "mc2_stderr": 0.014988663316140667
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.30342384887839435,
            "acc_stderr": 0.015806072717909573,
            "acc_norm": 0.3884297520661157,
            "acc_norm_stderr": 0.016756921571069415
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ingeol/sft_merged_660",
        "model_sha": "2426d1b6f2940a808b68c578e0fafdab1a515707",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}