{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.27559726962457337,
            "acc_stderr": 0.01305716965576184,
            "acc_norm": 0.33532423208191126,
            "acc_norm_stderr": 0.013796182947785564
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3536148177653854,
            "acc_stderr": 0.004771143074426132,
            "acc_norm": 0.4457279426409082,
            "acc_norm_stderr": 0.0049602999525194
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.29239766081871343,
            "acc_stderr": 0.034886477134579236,
            "acc_norm": 0.29239766081871343,
            "acc_norm_stderr": 0.034886477134579236
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2621359223300971,
            "acc_stderr": 0.04354631077260597,
            "acc_norm": 0.2621359223300971,
            "acc_norm_stderr": 0.04354631077260597
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.21711366538952745,
            "acc_stderr": 0.014743125394823291,
            "acc_norm": 0.21711366538952745,
            "acc_norm_stderr": 0.014743125394823291
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.22962962962962963,
            "acc_stderr": 0.03633384414073465,
            "acc_norm": 0.22962962962962963,
            "acc_norm_stderr": 0.03633384414073465
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.02924188386962882,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.02924188386962882
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3253012048192771,
            "acc_stderr": 0.03647168523683227,
            "acc_norm": 0.3253012048192771,
            "acc_norm_stderr": 0.03647168523683227
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2540192926045016,
            "acc_stderr": 0.024723861504771686,
            "acc_norm": 0.2540192926045016,
            "acc_norm_stderr": 0.024723861504771686
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.21973094170403587,
            "acc_stderr": 0.0277901770643836,
            "acc_norm": 0.21973094170403587,
            "acc_norm_stderr": 0.0277901770643836
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.03727673575596917,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.03727673575596917
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.1919191919191919,
            "acc_stderr": 0.028057791672989017,
            "acc_norm": 0.1919191919191919,
            "acc_norm_stderr": 0.028057791672989017
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237657,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237657
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.25630252100840334,
            "acc_stderr": 0.028359620870533946,
            "acc_norm": 0.25630252100840334,
            "acc_norm_stderr": 0.028359620870533946
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.20512820512820512,
            "acc_stderr": 0.020473233173551975,
            "acc_norm": 0.20512820512820512,
            "acc_norm_stderr": 0.020473233173551975
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052192,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2660098522167488,
            "acc_stderr": 0.03108982600293752,
            "acc_norm": 0.2660098522167488,
            "acc_norm_stderr": 0.03108982600293752
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2870967741935484,
            "acc_stderr": 0.025736542745594528,
            "acc_norm": 0.2870967741935484,
            "acc_norm_stderr": 0.025736542745594528
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.27350427350427353,
            "acc_stderr": 0.029202540153431166,
            "acc_norm": 0.27350427350427353,
            "acc_norm_stderr": 0.029202540153431166
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.24150943396226415,
            "acc_stderr": 0.026341480371118355,
            "acc_norm": 0.24150943396226415,
            "acc_norm_stderr": 0.026341480371118355
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.3,
            "acc_stderr": 0.04389311454644286,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04389311454644286
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.02730914058823019,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.02730914058823019
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2251655629139073,
            "acc_stderr": 0.03410435282008936,
            "acc_norm": 0.2251655629139073,
            "acc_norm_stderr": 0.03410435282008936
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2736318407960199,
            "acc_stderr": 0.03152439186555402,
            "acc_norm": 0.2736318407960199,
            "acc_norm_stderr": 0.03152439186555402
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.19653179190751446,
            "acc_stderr": 0.03029957466478815,
            "acc_norm": 0.19653179190751446,
            "acc_norm_stderr": 0.03029957466478815
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.02193587808118476,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.02193587808118476
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2569444444444444,
            "acc_stderr": 0.03653946969442099,
            "acc_norm": 0.2569444444444444,
            "acc_norm_stderr": 0.03653946969442099
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.023786203255508287,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.023786203255508287
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.25766871165644173,
            "acc_stderr": 0.03436150827846917,
            "acc_norm": 0.25766871165644173,
            "acc_norm_stderr": 0.03436150827846917
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.27469135802469136,
            "acc_stderr": 0.024836057868294677,
            "acc_norm": 0.27469135802469136,
            "acc_norm_stderr": 0.024836057868294677
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.18652849740932642,
            "acc_stderr": 0.02811209121011747,
            "acc_norm": 0.18652849740932642,
            "acc_norm_stderr": 0.02811209121011747
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.20550458715596331,
            "acc_stderr": 0.01732435232501601,
            "acc_norm": 0.20550458715596331,
            "acc_norm_stderr": 0.01732435232501601
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.03567016675276863,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.03567016675276863
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.02392915551735129,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.02392915551735129
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.2,
            "acc_stderr": 0.040201512610368445,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.040201512610368445
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.34710743801652894,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.34710743801652894,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17763157894736842,
            "acc_stderr": 0.031103182383123377,
            "acc_norm": 0.17763157894736842,
            "acc_norm_stderr": 0.031103182383123377
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.27124183006535946,
            "acc_stderr": 0.017986615304030305,
            "acc_norm": 0.27124183006535946,
            "acc_norm_stderr": 0.017986615304030305
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.24822695035460993,
            "acc_stderr": 0.0257700156442904,
            "acc_norm": 0.24822695035460993,
            "acc_norm_stderr": 0.0257700156442904
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.03114144782353603,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.03114144782353603
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25027932960893856,
            "acc_stderr": 0.01448750085285042,
            "acc_norm": 0.25027932960893856,
            "acc_norm_stderr": 0.01448750085285042
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.16911764705882354,
            "acc_stderr": 0.022770868010113007,
            "acc_norm": 0.16911764705882354,
            "acc_norm_stderr": 0.022770868010113007
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.22448979591836735,
            "acc_stderr": 0.026711430555538408,
            "acc_norm": 0.22448979591836735,
            "acc_norm_stderr": 0.026711430555538408
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.26582278481012656,
            "acc_stderr": 0.028756799629658332,
            "acc_norm": 0.26582278481012656,
            "acc_norm_stderr": 0.028756799629658332
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2633637548891786,
            "acc_stderr": 0.011249506403605296,
            "acc_norm": 0.2633637548891786,
            "acc_norm_stderr": 0.011249506403605296
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.030587591351604243,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.030587591351604243
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.296969696969697,
            "acc_stderr": 0.035679697722680474,
            "acc_norm": 0.296969696969697,
            "acc_norm_stderr": 0.035679697722680474
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2533659730722154,
            "mc1_stderr": 0.015225899340826824,
            "mc2": 0.4107878952898989,
            "mc2_stderr": 0.01500499376546119
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3270365997638725,
            "acc_stderr": 0.016129047485457022,
            "acc_norm": 0.4332939787485242,
            "acc_norm_stderr": 0.017036683641893105
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/AIFT-instruct-SFT-1.3B-refine-v3",
        "model_sha": "51280ba05cc276e596478e551c75fd4c61b07fe3",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}