{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.26791808873720135,
            "acc_stderr": 0.012942030195136428,
            "acc_norm": 0.31313993174061433,
            "acc_norm_stderr": 0.013552671543623496
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.32732523401712804,
            "acc_stderr": 0.00468278079050834,
            "acc_norm": 0.40380402310296754,
            "acc_norm_stderr": 0.004896563126116813
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.34502923976608185,
            "acc_stderr": 0.036459813773888065,
            "acc_norm": 0.34502923976608185,
            "acc_norm_stderr": 0.036459813773888065
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.3786407766990291,
            "acc_stderr": 0.048026946982589726,
            "acc_norm": 0.3786407766990291,
            "acc_norm_stderr": 0.048026946982589726
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3116219667943806,
            "acc_stderr": 0.016562433867284176,
            "acc_norm": 0.3116219667943806,
            "acc_norm_stderr": 0.016562433867284176
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.03853254836552003,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.03853254836552003
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621503,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621503
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2978723404255319,
            "acc_stderr": 0.02989614568209546,
            "acc_norm": 0.2978723404255319,
            "acc_norm_stderr": 0.02989614568209546
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3072289156626506,
            "acc_stderr": 0.035915667978246635,
            "acc_norm": 0.3072289156626506,
            "acc_norm_stderr": 0.035915667978246635
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3504823151125402,
            "acc_stderr": 0.02709865262130175,
            "acc_norm": 0.3504823151125402,
            "acc_norm_stderr": 0.02709865262130175
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.37668161434977576,
            "acc_stderr": 0.032521134899291884,
            "acc_norm": 0.37668161434977576,
            "acc_norm_stderr": 0.032521134899291884
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3282442748091603,
            "acc_stderr": 0.041184385658062976,
            "acc_norm": 0.3282442748091603,
            "acc_norm_stderr": 0.041184385658062976
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.03358618145732524,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.03358618145732524
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3586206896551724,
            "acc_stderr": 0.03996629574876718,
            "acc_norm": 0.3586206896551724,
            "acc_norm_stderr": 0.03996629574876718
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179964,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179964
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3319327731092437,
            "acc_stderr": 0.030588697013783663,
            "acc_norm": 0.3319327731092437,
            "acc_norm_stderr": 0.030588697013783663
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.30256410256410254,
            "acc_stderr": 0.02329088805377272,
            "acc_norm": 0.30256410256410254,
            "acc_norm_stderr": 0.02329088805377272
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04557239513497752,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04557239513497752
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.26108374384236455,
            "acc_stderr": 0.030903796952114475,
            "acc_norm": 0.26108374384236455,
            "acc_norm_stderr": 0.030903796952114475
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3580645161290323,
            "acc_stderr": 0.027273890594300642,
            "acc_norm": 0.3580645161290323,
            "acc_norm_stderr": 0.027273890594300642
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5341880341880342,
            "acc_stderr": 0.03267942734081227,
            "acc_norm": 0.5341880341880342,
            "acc_norm_stderr": 0.03267942734081227
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3132075471698113,
            "acc_stderr": 0.028544793319055333,
            "acc_norm": 0.3132075471698113,
            "acc_norm_stderr": 0.028544793319055333
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.04265792110940588,
            "acc_norm": 0.2727272727272727,
            "acc_norm_stderr": 0.04265792110940588
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.028037929969114993,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.028037929969114993
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.39800995024875624,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.39800995024875624,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.033687629322594316,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.033687629322594316
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25132275132275134,
            "acc_stderr": 0.022340482339643895,
            "acc_norm": 0.25132275132275134,
            "acc_norm_stderr": 0.022340482339643895
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.038990736873573344,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.038990736873573344
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909282,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909282
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.32947976878612717,
            "acc_stderr": 0.025305258131879723,
            "acc_norm": 0.32947976878612717,
            "acc_norm_stderr": 0.025305258131879723
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3128834355828221,
            "acc_stderr": 0.036429145782924055,
            "acc_norm": 0.3128834355828221,
            "acc_norm_stderr": 0.036429145782924055
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2716049382716049,
            "acc_stderr": 0.024748624490537375,
            "acc_norm": 0.2716049382716049,
            "acc_norm_stderr": 0.024748624490537375
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24870466321243523,
            "acc_stderr": 0.031195840877700304,
            "acc_norm": 0.24870466321243523,
            "acc_norm_stderr": 0.031195840877700304
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3100917431192661,
            "acc_stderr": 0.019830849684439756,
            "acc_norm": 0.3100917431192661,
            "acc_norm_stderr": 0.019830849684439756
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928704,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928704
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3464052287581699,
            "acc_stderr": 0.027245613047215355,
            "acc_norm": 0.3464052287581699,
            "acc_norm_stderr": 0.027245613047215355
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4049586776859504,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.4049586776859504,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.035541803680256896,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.035541803680256896
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.28921568627450983,
            "acc_stderr": 0.01834252984527591,
            "acc_norm": 0.28921568627450983,
            "acc_norm_stderr": 0.01834252984527591
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2872340425531915,
            "acc_stderr": 0.026992199173064356,
            "acc_norm": 0.2872340425531915,
            "acc_norm_stderr": 0.026992199173064356
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697624,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697624
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631296,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631296
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25027932960893856,
            "acc_stderr": 0.014487500852850409,
            "acc_norm": 0.25027932960893856,
            "acc_norm_stderr": 0.014487500852850409
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3014705882352941,
            "acc_stderr": 0.027875982114273168,
            "acc_norm": 0.3014705882352941,
            "acc_norm_stderr": 0.027875982114273168
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3020408163265306,
            "acc_stderr": 0.029393609319879815,
            "acc_norm": 0.3020408163265306,
            "acc_norm_stderr": 0.029393609319879815
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.379746835443038,
            "acc_stderr": 0.031591887529658504,
            "acc_norm": 0.379746835443038,
            "acc_norm_stderr": 0.031591887529658504
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2646675358539765,
            "acc_stderr": 0.011267332992845531,
            "acc_norm": 0.2646675358539765,
            "acc_norm_stderr": 0.011267332992845531
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.031321798030832904,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.031321798030832904
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24848484848484848,
            "acc_stderr": 0.03374402644139404,
            "acc_norm": 0.24848484848484848,
            "acc_norm_stderr": 0.03374402644139404
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2558139534883721,
            "mc1_stderr": 0.015274176219283349,
            "mc2": 0.42277041139901306,
            "mc2_stderr": 0.015451140013408284
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.27744982290436837,
            "acc_stderr": 0.015393630236605975,
            "acc_norm": 0.35537190082644626,
            "acc_norm_stderr": 0.01645549600031454
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Herry443/Mistral-7B-KNUT-v0.4",
        "model_sha": "ed7abbc15e628a6832b00b24aad888e015e2a65b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}