{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.35921501706484643,
            "acc_stderr": 0.014020224155839162,
            "acc_norm": 0.4206484641638225,
            "acc_norm_stderr": 0.0144262112525084
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4087831109340769,
            "acc_stderr": 0.004906043613013394,
            "acc_norm": 0.5447122087233619,
            "acc_norm_stderr": 0.004969790407117533
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4367816091954023,
            "acc_stderr": 0.017736470837800684,
            "acc_norm": 0.4367816091954023,
            "acc_norm_stderr": 0.017736470837800684
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.03820169914517905,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.03820169914517905
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231008,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231008
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.0382840111507902,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.0382840111507902
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3633440514469453,
            "acc_stderr": 0.02731684767419271,
            "acc_norm": 0.3633440514469453,
            "acc_norm_stderr": 0.02731684767419271
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.033554765962343545,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.033554765962343545
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3893129770992366,
            "acc_stderr": 0.04276486542814591,
            "acc_norm": 0.3893129770992366,
            "acc_norm_stderr": 0.04276486542814591
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3787878787878788,
            "acc_stderr": 0.03456088731993747,
            "acc_norm": 0.3787878787878788,
            "acc_norm_stderr": 0.03456088731993747
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.27586206896551724,
            "acc_stderr": 0.03724563619774633,
            "acc_norm": 0.27586206896551724,
            "acc_norm_stderr": 0.03724563619774633
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.030388353551886845,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.030388353551886845
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.32564102564102565,
            "acc_stderr": 0.02375966576741229,
            "acc_norm": 0.32564102564102565,
            "acc_norm_stderr": 0.02375966576741229
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.04766075165356461,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.04766075165356461
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.21674876847290642,
            "acc_stderr": 0.02899033125251624,
            "acc_norm": 0.21674876847290642,
            "acc_norm_stderr": 0.02899033125251624
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2967741935483871,
            "acc_stderr": 0.02598850079241187,
            "acc_norm": 0.2967741935483871,
            "acc_norm_stderr": 0.02598850079241187
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5982905982905983,
            "acc_stderr": 0.03211693751051621,
            "acc_norm": 0.5982905982905983,
            "acc_norm_stderr": 0.03211693751051621
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.30566037735849055,
            "acc_stderr": 0.028353298073322666,
            "acc_norm": 0.30566037735849055,
            "acc_norm_stderr": 0.028353298073322666
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24444444444444444,
            "acc_stderr": 0.026202766534652148,
            "acc_norm": 0.24444444444444444,
            "acc_norm_stderr": 0.026202766534652148
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.19205298013245034,
            "acc_stderr": 0.032162984205936135,
            "acc_norm": 0.19205298013245034,
            "acc_norm_stderr": 0.032162984205936135
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.46766169154228854,
            "acc_stderr": 0.035281314729336065,
            "acc_norm": 0.46766169154228854,
            "acc_norm_stderr": 0.035281314729336065
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.03295304696818318,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.03295304696818318
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.21693121693121692,
            "acc_stderr": 0.021227082449445045,
            "acc_norm": 0.21693121693121692,
            "acc_norm_stderr": 0.021227082449445045
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3472222222222222,
            "acc_stderr": 0.03981240543717862,
            "acc_norm": 0.3472222222222222,
            "acc_norm_stderr": 0.03981240543717862
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.025906632631016124,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.025906632631016124
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.26993865030674846,
            "acc_stderr": 0.034878251684978906,
            "acc_norm": 0.26993865030674846,
            "acc_norm_stderr": 0.034878251684978906
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.026869490744815254,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.026869490744815254
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.22,
            "acc_stderr": 0.0416333199893227,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.0416333199893227
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.41450777202072536,
            "acc_stderr": 0.03555300319557672,
            "acc_norm": 0.41450777202072536,
            "acc_norm_stderr": 0.03555300319557672
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.0383515395439942,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.0383515395439942
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3394495412844037,
            "acc_stderr": 0.02030210934266235,
            "acc_norm": 0.3394495412844037,
            "acc_norm_stderr": 0.02030210934266235
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.03932537680392871,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.03932537680392871
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3300653594771242,
            "acc_stderr": 0.026925654653615686,
            "acc_norm": 0.3300653594771242,
            "acc_norm_stderr": 0.026925654653615686
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6033057851239669,
            "acc_stderr": 0.044658697805310094,
            "acc_norm": 0.6033057851239669,
            "acc_norm_stderr": 0.044658697805310094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.32894736842105265,
            "acc_stderr": 0.03823428969926604,
            "acc_norm": 0.32894736842105265,
            "acc_norm_stderr": 0.03823428969926604
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.01911721391149517,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.01911721391149517
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.026469036818590627,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.026469036818590627
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3392857142857143,
            "acc_stderr": 0.04493949068613539,
            "acc_norm": 0.3392857142857143,
            "acc_norm_stderr": 0.04493949068613539
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.13425925925925927,
            "acc_stderr": 0.02325127759054592,
            "acc_norm": 0.13425925925925927,
            "acc_norm_stderr": 0.02325127759054592
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.014422292204808852,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.014422292204808852
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.22058823529411764,
            "acc_stderr": 0.02518778666022726,
            "acc_norm": 0.22058823529411764,
            "acc_norm_stderr": 0.02518778666022726
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.028920583220675578,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.028920583220675578
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3670886075949367,
            "acc_stderr": 0.031376240725616185,
            "acc_norm": 0.3670886075949367,
            "acc_norm_stderr": 0.031376240725616185
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27183833116036504,
            "acc_stderr": 0.011363135278651414,
            "acc_norm": 0.27183833116036504,
            "acc_norm_stderr": 0.011363135278651414
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.033321399446680854,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.033321399446680854
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3575757575757576,
            "acc_stderr": 0.037425970438065836,
            "acc_norm": 0.3575757575757576,
            "acc_norm_stderr": 0.037425970438065836
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2582619339045288,
            "mc1_stderr": 0.015321821688476197,
            "mc2": 0.42735209878041286,
            "mc2_stderr": 0.014892368565155705
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.43779342723004694,
            "acc_stderr": 0.017006611775152725,
            "acc_norm": 0.5316901408450704,
            "acc_norm_stderr": 0.01710531885082843
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v2",
        "model_sha": "0779b43890c83a02fe7696321c95966717945f58",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}