{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.20563139931740615,
            "acc_stderr": 0.011810745260742581,
            "acc_norm": 0.257679180887372,
            "acc_norm_stderr": 0.012780770562768414
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.27693686516630156,
            "acc_stderr": 0.004465704810893538,
            "acc_norm": 0.30611431985660226,
            "acc_norm_stderr": 0.004599358920909526
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.34502923976608185,
            "acc_stderr": 0.03645981377388807,
            "acc_norm": 0.34502923976608185,
            "acc_norm_stderr": 0.03645981377388807
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.23300970873786409,
            "acc_stderr": 0.04185832598928315,
            "acc_norm": 0.23300970873786409,
            "acc_norm_stderr": 0.04185832598928315
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24904214559386972,
            "acc_stderr": 0.015464676163395983,
            "acc_norm": 0.24904214559386972,
            "acc_norm_stderr": 0.015464676163395983
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.039725528847851375,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.039725528847851375
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036846,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036846
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2680851063829787,
            "acc_stderr": 0.02895734278834235,
            "acc_norm": 0.2680851063829787,
            "acc_norm_stderr": 0.02895734278834235
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2289156626506024,
            "acc_stderr": 0.03270745277352477,
            "acc_norm": 0.2289156626506024,
            "acc_norm_stderr": 0.03270745277352477
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.20257234726688103,
            "acc_stderr": 0.022827317491059682,
            "acc_norm": 0.20257234726688103,
            "acc_norm_stderr": 0.022827317491059682
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2825112107623318,
            "acc_stderr": 0.030216831011508762,
            "acc_norm": 0.2825112107623318,
            "acc_norm_stderr": 0.030216831011508762
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.03727673575596918,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.03727673575596918
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165044,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165044
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.21212121212121213,
            "acc_stderr": 0.029126522834586825,
            "acc_norm": 0.21212121212121213,
            "acc_norm_stderr": 0.029126522834586825
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2689655172413793,
            "acc_stderr": 0.03695183311650232,
            "acc_norm": 0.2689655172413793,
            "acc_norm_stderr": 0.03695183311650232
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.027553614467863773,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.027553614467863773
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2230769230769231,
            "acc_stderr": 0.021107730127243998,
            "acc_norm": 0.2230769230769231,
            "acc_norm_stderr": 0.021107730127243998
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.2,
            "acc_stderr": 0.040201512610368445,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.040201512610368445
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.044531975073749834,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.044531975073749834
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.28078817733990147,
            "acc_stderr": 0.0316185633535861,
            "acc_norm": 0.28078817733990147,
            "acc_norm_stderr": 0.0316185633535861
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.22258064516129034,
            "acc_stderr": 0.02366421667164251,
            "acc_norm": 0.22258064516129034,
            "acc_norm_stderr": 0.02366421667164251
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2905982905982906,
            "acc_stderr": 0.029745048572674054,
            "acc_norm": 0.2905982905982906,
            "acc_norm_stderr": 0.029745048572674054
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.23018867924528302,
            "acc_stderr": 0.025907897122408173,
            "acc_norm": 0.23018867924528302,
            "acc_norm_stderr": 0.025907897122408173
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.22727272727272727,
            "acc_stderr": 0.040139645540727735,
            "acc_norm": 0.22727272727272727,
            "acc_norm_stderr": 0.040139645540727735
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763744,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763744
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.25870646766169153,
            "acc_stderr": 0.030965903123573037,
            "acc_norm": 0.25870646766169153,
            "acc_norm_stderr": 0.030965903123573037
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.21965317919075145,
            "acc_stderr": 0.031568093627031744,
            "acc_norm": 0.21965317919075145,
            "acc_norm_stderr": 0.031568093627031744
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24867724867724866,
            "acc_stderr": 0.022261817692400175,
            "acc_norm": 0.24867724867724866,
            "acc_norm_stderr": 0.022261817692400175
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2569444444444444,
            "acc_stderr": 0.03653946969442099,
            "acc_norm": 0.2569444444444444,
            "acc_norm_stderr": 0.03653946969442099
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816507,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816507
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.28034682080924855,
            "acc_stderr": 0.024182427496577622,
            "acc_norm": 0.28034682080924855,
            "acc_norm_stderr": 0.024182427496577622
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.25153374233128833,
            "acc_stderr": 0.034089978868575295,
            "acc_norm": 0.25153374233128833,
            "acc_norm_stderr": 0.034089978868575295
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2808641975308642,
            "acc_stderr": 0.025006469755799215,
            "acc_norm": 0.2808641975308642,
            "acc_norm_stderr": 0.025006469755799215
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.21243523316062177,
            "acc_stderr": 0.02951928261681725,
            "acc_norm": 0.21243523316062177,
            "acc_norm_stderr": 0.02951928261681725
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.2036697247706422,
            "acc_stderr": 0.01726674208763079,
            "acc_norm": 0.2036697247706422,
            "acc_norm_stderr": 0.01726674208763079
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.18253968253968253,
            "acc_stderr": 0.03455071019102148,
            "acc_norm": 0.18253968253968253,
            "acc_norm_stderr": 0.03455071019102148
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.25163398692810457,
            "acc_stderr": 0.0248480182638752,
            "acc_norm": 0.25163398692810457,
            "acc_norm_stderr": 0.0248480182638752
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.19736842105263158,
            "acc_stderr": 0.03238981601699397,
            "acc_norm": 0.19736842105263158,
            "acc_norm_stderr": 0.03238981601699397
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2826797385620915,
            "acc_stderr": 0.018217269552053435,
            "acc_norm": 0.2826797385620915,
            "acc_norm_stderr": 0.018217269552053435
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880592,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880592
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3125,
            "acc_stderr": 0.043994650575715215,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.043994650575715215
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2824074074074074,
            "acc_stderr": 0.030701372111510934,
            "acc_norm": 0.2824074074074074,
            "acc_norm_stderr": 0.030701372111510934
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2335195530726257,
            "acc_stderr": 0.014149575348976264,
            "acc_norm": 0.2335195530726257,
            "acc_norm_stderr": 0.014149575348976264
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.22426470588235295,
            "acc_stderr": 0.025336848563332372,
            "acc_norm": 0.22426470588235295,
            "acc_norm_stderr": 0.025336848563332372
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.23265306122448978,
            "acc_stderr": 0.02704925791589618,
            "acc_norm": 0.23265306122448978,
            "acc_norm_stderr": 0.02704925791589618
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2489451476793249,
            "acc_stderr": 0.028146970599422644,
            "acc_norm": 0.2489451476793249,
            "acc_norm_stderr": 0.028146970599422644
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2301173402868318,
            "acc_stderr": 0.01075018317737556,
            "acc_norm": 0.2301173402868318,
            "acc_norm_stderr": 0.01075018317737556
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.23039215686274508,
            "acc_stderr": 0.029554292605695063,
            "acc_norm": 0.23039215686274508,
            "acc_norm_stderr": 0.029554292605695063
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03225078108306289,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03225078108306289
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2839657282741738,
            "mc1_stderr": 0.015785370858396718,
            "mc2": 0.46188658792557263,
            "mc2_stderr": 0.016386200757722597
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.21487603305785125,
            "acc_stderr": 0.01412140552290331,
            "acc_norm": 0.28807556080283353,
            "acc_norm_stderr": 0.015569869674838374
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "heegyu/WizardVicuna-open-llama-3b-v2",
        "model_sha": "0946550dfbf40d926d6ba816d0ca13e9c810fa72",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}