{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2568259385665529,
            "acc_stderr": 0.012766923794116801,
            "acc_norm": 0.30887372013651876,
            "acc_norm_stderr": 0.013501770929344003
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35172276438956385,
            "acc_stderr": 0.004765320784902128,
            "acc_norm": 0.4396534554869548,
            "acc_norm_stderr": 0.004953305461311753
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.03546976959393161,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.03546976959393161
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.23300970873786409,
            "acc_stderr": 0.04185832598928315,
            "acc_norm": 0.23300970873786409,
            "acc_norm_stderr": 0.04185832598928315
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2656449553001277,
            "acc_stderr": 0.01579430248788873,
            "acc_norm": 0.2656449553001277,
            "acc_norm_stderr": 0.01579430248788873
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2518518518518518,
            "acc_stderr": 0.03749850709174022,
            "acc_norm": 0.2518518518518518,
            "acc_norm_stderr": 0.03749850709174022
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.20851063829787234,
            "acc_stderr": 0.02655698211783875,
            "acc_norm": 0.20851063829787234,
            "acc_norm_stderr": 0.02655698211783875
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2289156626506024,
            "acc_stderr": 0.03270745277352477,
            "acc_norm": 0.2289156626506024,
            "acc_norm_stderr": 0.03270745277352477
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.27009646302250806,
            "acc_stderr": 0.025218040373410622,
            "acc_norm": 0.27009646302250806,
            "acc_norm_stderr": 0.025218040373410622
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2062780269058296,
            "acc_stderr": 0.027157150479563824,
            "acc_norm": 0.2062780269058296,
            "acc_norm_stderr": 0.027157150479563824
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.22900763358778625,
            "acc_stderr": 0.036853466317118506,
            "acc_norm": 0.22900763358778625,
            "acc_norm_stderr": 0.036853466317118506
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909282,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909282
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.23232323232323232,
            "acc_stderr": 0.030088629490217487,
            "acc_norm": 0.23232323232323232,
            "acc_norm_stderr": 0.030088629490217487
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.30344827586206896,
            "acc_stderr": 0.03831226048850333,
            "acc_norm": 0.30344827586206896,
            "acc_norm_stderr": 0.03831226048850333
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24369747899159663,
            "acc_stderr": 0.027886828078380558,
            "acc_norm": 0.24369747899159663,
            "acc_norm_stderr": 0.027886828078380558
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2717948717948718,
            "acc_stderr": 0.022556551010132354,
            "acc_norm": 0.2717948717948718,
            "acc_norm_stderr": 0.022556551010132354
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.040191074725573483,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.040191074725573483
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.31527093596059114,
            "acc_stderr": 0.03269080871970186,
            "acc_norm": 0.31527093596059114,
            "acc_norm_stderr": 0.03269080871970186
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.26129032258064516,
            "acc_stderr": 0.024993053397764833,
            "acc_norm": 0.26129032258064516,
            "acc_norm_stderr": 0.024993053397764833
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.23931623931623933,
            "acc_stderr": 0.027951826808924333,
            "acc_norm": 0.23931623931623933,
            "acc_norm_stderr": 0.027951826808924333
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22641509433962265,
            "acc_stderr": 0.025757559893106734,
            "acc_norm": 0.22641509433962265,
            "acc_norm_stderr": 0.025757559893106734
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.17272727272727273,
            "acc_stderr": 0.03620691833929217,
            "acc_norm": 0.17272727272727273,
            "acc_norm_stderr": 0.03620691833929217
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145668,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145668
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.25165562913907286,
            "acc_stderr": 0.03543304234389985,
            "acc_norm": 0.25165562913907286,
            "acc_norm_stderr": 0.03543304234389985
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2537313432835821,
            "acc_stderr": 0.03076944496729602,
            "acc_norm": 0.2537313432835821,
            "acc_norm_stderr": 0.03076944496729602
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.03214737302029468,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.03214737302029468
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.26455026455026454,
            "acc_stderr": 0.022717467897708607,
            "acc_norm": 0.26455026455026454,
            "acc_norm_stderr": 0.022717467897708607
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566016,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566016
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2630057803468208,
            "acc_stderr": 0.023703099525258158,
            "acc_norm": 0.2630057803468208,
            "acc_norm_stderr": 0.023703099525258158
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.27607361963190186,
            "acc_stderr": 0.0351238528370505,
            "acc_norm": 0.27607361963190186,
            "acc_norm_stderr": 0.0351238528370505
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.024922001168886338,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.024922001168886338
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.34196891191709844,
            "acc_stderr": 0.03423465100104283,
            "acc_norm": 0.34196891191709844,
            "acc_norm_stderr": 0.03423465100104283
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.24220183486238533,
            "acc_stderr": 0.01836817630659862,
            "acc_norm": 0.24220183486238533,
            "acc_norm_stderr": 0.01836817630659862
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235173,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235173
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.24836601307189543,
            "acc_stderr": 0.02473998135511359,
            "acc_norm": 0.24836601307189543,
            "acc_norm_stderr": 0.02473998135511359
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.29605263157894735,
            "acc_stderr": 0.03715062154998904,
            "acc_norm": 0.29605263157894735,
            "acc_norm_stderr": 0.03715062154998904
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.26633986928104575,
            "acc_stderr": 0.017883188134667192,
            "acc_norm": 0.26633986928104575,
            "acc_norm_stderr": 0.017883188134667192
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307854,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307854
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.23214285714285715,
            "acc_stderr": 0.04007341809755808,
            "acc_norm": 0.23214285714285715,
            "acc_norm_stderr": 0.04007341809755808
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.03324708911809117,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.03324708911809117
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24804469273743016,
            "acc_stderr": 0.014444157808261453,
            "acc_norm": 0.24804469273743016,
            "acc_norm_stderr": 0.014444157808261453
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.26838235294117646,
            "acc_stderr": 0.02691748122437723,
            "acc_norm": 0.26838235294117646,
            "acc_norm_stderr": 0.02691748122437723
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2979591836734694,
            "acc_stderr": 0.029279567411065684,
            "acc_norm": 0.2979591836734694,
            "acc_norm_stderr": 0.029279567411065684
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.26582278481012656,
            "acc_stderr": 0.028756799629658332,
            "acc_norm": 0.26582278481012656,
            "acc_norm_stderr": 0.028756799629658332
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25945241199478486,
            "acc_stderr": 0.011195262076350309,
            "acc_norm": 0.25945241199478486,
            "acc_norm_stderr": 0.011195262076350309
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24019607843137256,
            "acc_stderr": 0.02998373305591361,
            "acc_norm": 0.24019607843137256,
            "acc_norm_stderr": 0.02998373305591361
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.03453131801885416,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.03453131801885416
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26438188494492043,
            "mc1_stderr": 0.015438211119522517,
            "mc2": 0.42818983286182555,
            "mc2_stderr": 0.015309048799107149
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.282172373081464,
            "acc_stderr": 0.01547327158398843,
            "acc_norm": 0.3412042502951594,
            "acc_norm_stderr": 0.016300368742137306
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "heegyu/polyglot-ko-3.8b-chat",
        "model_sha": "0e8739e22d15d44f6196fb281895856a0372564a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}