|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2977815699658703,
            "acc_stderr": 0.01336308010724449,
            "acc_norm": 0.3395904436860068,
            "acc_norm_stderr": 0.013839039762820167
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.39055964947221666,
            "acc_stderr": 0.004868787333436579,
            "acc_norm": 0.5038836885082653,
            "acc_norm_stderr": 0.004989630887066195
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.03546976959393161,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.03546976959393161
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.18446601941747573,
            "acc_stderr": 0.03840423627288276,
            "acc_norm": 0.18446601941747573,
            "acc_norm_stderr": 0.03840423627288276
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2515964240102171,
            "acc_stderr": 0.015517322365529631,
            "acc_norm": 0.2515964240102171,
            "acc_norm_stderr": 0.015517322365529631
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2170212765957447,
            "acc_stderr": 0.026947483121496245,
            "acc_norm": 0.2170212765957447,
            "acc_norm_stderr": 0.026947483121496245
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.21084337349397592,
            "acc_stderr": 0.0317555478662992,
            "acc_norm": 0.21084337349397592,
            "acc_norm_stderr": 0.0317555478662992
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3215434083601286,
            "acc_stderr": 0.026527724079528872,
            "acc_norm": 0.3215434083601286,
            "acc_norm_stderr": 0.026527724079528872
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.15695067264573992,
            "acc_stderr": 0.02441358717490739,
            "acc_norm": 0.15695067264573992,
            "acc_norm_stderr": 0.02441358717490739
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.037276735755969174,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.037276735755969174
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03191178226713549,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03191178226713549
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2896551724137931,
            "acc_stderr": 0.03780019230438015,
            "acc_norm": 0.2896551724137931,
            "acc_norm_stderr": 0.03780019230438015
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237656,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237656
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23109243697478993,
            "acc_stderr": 0.027381406927868966,
            "acc_norm": 0.23109243697478993,
            "acc_norm_stderr": 0.027381406927868966
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2282051282051282,
            "acc_stderr": 0.02127839386358628,
            "acc_norm": 0.2282051282051282,
            "acc_norm_stderr": 0.02127839386358628
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165044,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165044
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252627,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2660098522167488,
            "acc_stderr": 0.03108982600293752,
            "acc_norm": 0.2660098522167488,
            "acc_norm_stderr": 0.03108982600293752
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.25161290322580643,
            "acc_stderr": 0.02468597928623997,
            "acc_norm": 0.25161290322580643,
            "acc_norm_stderr": 0.02468597928623997
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.24786324786324787,
            "acc_stderr": 0.028286324075564386,
            "acc_norm": 0.24786324786324787,
            "acc_norm_stderr": 0.028286324075564386
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2490566037735849,
            "acc_stderr": 0.026616482980501715,
            "acc_norm": 0.2490566037735849,
            "acc_norm_stderr": 0.026616482980501715
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03955932861795833,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03955932861795833
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.02659393910184408,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.02659393910184408
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.26490066225165565,
            "acc_stderr": 0.03603038545360383,
            "acc_norm": 0.26490066225165565,
            "acc_norm_stderr": 0.03603038545360383
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.21890547263681592,
            "acc_stderr": 0.029239174636647,
            "acc_norm": 0.21890547263681592,
            "acc_norm_stderr": 0.029239174636647
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2832369942196532,
            "acc_stderr": 0.03435568056047873,
            "acc_norm": 0.2832369942196532,
            "acc_norm_stderr": 0.03435568056047873
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.021935878081184766,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.021935878081184766
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2569444444444444,
            "acc_stderr": 0.03653946969442099,
            "acc_norm": 0.2569444444444444,
            "acc_norm_stderr": 0.03653946969442099
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036624,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036624
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23410404624277456,
            "acc_stderr": 0.022797110278071138,
            "acc_norm": 0.23410404624277456,
            "acc_norm_stderr": 0.022797110278071138
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3067484662576687,
            "acc_stderr": 0.03623089915724148,
            "acc_norm": 0.3067484662576687,
            "acc_norm_stderr": 0.03623089915724148
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.30864197530864196,
            "acc_stderr": 0.02570264026060376,
            "acc_norm": 0.30864197530864196,
            "acc_norm_stderr": 0.02570264026060376
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24352331606217617,
            "acc_stderr": 0.030975436386845426,
            "acc_norm": 0.24352331606217617,
            "acc_norm_stderr": 0.030975436386845426
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436716,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436716
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23486238532110093,
            "acc_stderr": 0.018175110510343588,
            "acc_norm": 0.23486238532110093,
            "acc_norm_stderr": 0.018175110510343588
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.16666666666666666,
            "acc_stderr": 0.03333333333333338,
            "acc_norm": 0.16666666666666666,
            "acc_norm_stderr": 0.03333333333333338
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.23202614379084968,
            "acc_stderr": 0.024170840879341016,
            "acc_norm": 0.23202614379084968,
            "acc_norm_stderr": 0.024170840879341016
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322674,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322674
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.30578512396694213,
            "acc_stderr": 0.04205953933884124,
            "acc_norm": 0.30578512396694213,
            "acc_norm_stderr": 0.04205953933884124
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.03782728980865469,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.03782728980865469
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.28104575163398693,
            "acc_stderr": 0.018185218954318082,
            "acc_norm": 0.28104575163398693,
            "acc_norm_stderr": 0.018185218954318082
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.025257861359432407,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.025257861359432407
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03894641120044793,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03894641120044793
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.30092592592592593,
            "acc_stderr": 0.031280390843298804,
            "acc_norm": 0.30092592592592593,
            "acc_norm_stderr": 0.031280390843298804
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2536312849162011,
            "acc_stderr": 0.014551553659369922,
            "acc_norm": 0.2536312849162011,
            "acc_norm_stderr": 0.014551553659369922
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.028582709753898445,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.028582709753898445
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2612244897959184,
            "acc_stderr": 0.028123429335142783,
            "acc_norm": 0.2612244897959184,
            "acc_norm_stderr": 0.028123429335142783
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.26582278481012656,
            "acc_stderr": 0.028756799629658335,
            "acc_norm": 0.26582278481012656,
            "acc_norm_stderr": 0.028756799629658335
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27053455019556716,
            "acc_stderr": 0.011345996743539265,
            "acc_norm": 0.27053455019556716,
            "acc_norm_stderr": 0.011345996743539265
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.031493281045079556,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.031493281045079556
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24242424242424243,
            "acc_stderr": 0.033464098810559534,
            "acc_norm": 0.24242424242424243,
            "acc_norm_stderr": 0.033464098810559534
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.015528566637087298,
            "mc2": 0.4211117529867161,
            "mc2_stderr": 0.014959536407311791
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.31286894923258557,
            "acc_stderr": 0.015941010118302658,
            "acc_norm": 0.3754427390791027,
            "acc_norm_stderr": 0.016648411589511098
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ingeol/sft_merged",
        "model_sha": "a958e5054c1935e86f418c797825ebccb9e7fd89",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}