{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.38822525597269625,
            "acc_stderr": 0.014241614207414046,
            "acc_norm": 0.4513651877133106,
            "acc_norm_stderr": 0.014542104569955262
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.40689105755825533,
            "acc_stderr": 0.004902502514738606,
            "acc_norm": 0.5412268472415853,
            "acc_norm_stderr": 0.004972790690640187
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30409356725146197,
            "acc_stderr": 0.0352821125824523,
            "acc_norm": 0.30409356725146197,
            "acc_norm_stderr": 0.0352821125824523
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.36893203883495146,
            "acc_stderr": 0.04777615181156739,
            "acc_norm": 0.36893203883495146,
            "acc_norm_stderr": 0.04777615181156739
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2822477650063857,
            "acc_stderr": 0.016095302969878548,
            "acc_norm": 0.2822477650063857,
            "acc_norm_stderr": 0.016095302969878548
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.03853254836552003,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.03853254836552003
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.24680851063829787,
            "acc_stderr": 0.0281854413012341,
            "acc_norm": 0.24680851063829787,
            "acc_norm_stderr": 0.0281854413012341
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.036108050180310235,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.036108050180310235
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3247588424437299,
            "acc_stderr": 0.026596782287697043,
            "acc_norm": 0.3247588424437299,
            "acc_norm_stderr": 0.026596782287697043
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.25112107623318386,
            "acc_stderr": 0.029105220833224622,
            "acc_norm": 0.25112107623318386,
            "acc_norm_stderr": 0.029105220833224622
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.32061068702290074,
            "acc_stderr": 0.040933292298342784,
            "acc_norm": 0.32061068702290074,
            "acc_norm_stderr": 0.040933292298342784
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.30303030303030304,
            "acc_stderr": 0.03274287914026866,
            "acc_norm": 0.30303030303030304,
            "acc_norm_stderr": 0.03274287914026866
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617747,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617747
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3697478991596639,
            "acc_stderr": 0.03135709599613591,
            "acc_norm": 0.3697478991596639,
            "acc_norm_stderr": 0.03135709599613591
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.33076923076923076,
            "acc_stderr": 0.023854795680971142,
            "acc_norm": 0.33076923076923076,
            "acc_norm_stderr": 0.023854795680971142
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.17,
            "acc_stderr": 0.0377525168068637,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.0377525168068637
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.044531975073749834,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.044531975073749834
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2512315270935961,
            "acc_stderr": 0.030516530732694433,
            "acc_norm": 0.2512315270935961,
            "acc_norm_stderr": 0.030516530732694433
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.31290322580645163,
            "acc_stderr": 0.02637756702864586,
            "acc_norm": 0.31290322580645163,
            "acc_norm_stderr": 0.02637756702864586
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.33760683760683763,
            "acc_stderr": 0.030980296992618558,
            "acc_norm": 0.33760683760683763,
            "acc_norm_stderr": 0.030980296992618558
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670713,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670713
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.04265792110940588,
            "acc_norm": 0.2727272727272727,
            "acc_norm_stderr": 0.04265792110940588
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.025348097468097856,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.025348097468097856
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.03802039760107903,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.03802039760107903
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.39303482587064675,
            "acc_stderr": 0.0345368246603156,
            "acc_norm": 0.39303482587064675,
            "acc_norm_stderr": 0.0345368246603156
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.03295304696818317,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.03295304696818317
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.22486772486772486,
            "acc_stderr": 0.02150209607822914,
            "acc_norm": 0.22486772486772486,
            "acc_norm_stderr": 0.02150209607822914
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3541666666666667,
            "acc_stderr": 0.039994111357535424,
            "acc_norm": 0.3541666666666667,
            "acc_norm_stderr": 0.039994111357535424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.45,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.025190181327608415,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.025190181327608415
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.31790123456790126,
            "acc_stderr": 0.025910063528240865,
            "acc_norm": 0.31790123456790126,
            "acc_norm_stderr": 0.025910063528240865
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.39896373056994816,
            "acc_stderr": 0.03533999094065696,
            "acc_norm": 0.39896373056994816,
            "acc_norm_stderr": 0.03533999094065696
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.0414243971948936,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.0414243971948936
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.29357798165137616,
            "acc_stderr": 0.01952515112263966,
            "acc_norm": 0.29357798165137616,
            "acc_norm_stderr": 0.01952515112263966
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04360314860077459,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04360314860077459
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.026992544339297226,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.026992544339297226
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.49586776859504134,
            "acc_stderr": 0.045641987674327526,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.045641987674327526
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.28289473684210525,
            "acc_stderr": 0.03665349695640767,
            "acc_norm": 0.28289473684210525,
            "acc_norm_stderr": 0.03665349695640767
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2679738562091503,
            "acc_stderr": 0.017917974069594726,
            "acc_norm": 0.2679738562091503,
            "acc_norm_stderr": 0.017917974069594726
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.026684564340460994,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.026684564340460994
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.15178571428571427,
            "acc_stderr": 0.03405702838185695,
            "acc_norm": 0.15178571428571427,
            "acc_norm_stderr": 0.03405702838185695
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4675925925925926,
            "acc_stderr": 0.03402801581358966,
            "acc_norm": 0.4675925925925926,
            "acc_norm_stderr": 0.03402801581358966
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26145251396648045,
            "acc_stderr": 0.014696599650364545,
            "acc_norm": 0.26145251396648045,
            "acc_norm_stderr": 0.014696599650364545
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.23161764705882354,
            "acc_stderr": 0.025626533803777562,
            "acc_norm": 0.23161764705882354,
            "acc_norm_stderr": 0.025626533803777562
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.40816326530612246,
            "acc_stderr": 0.03146465712827423,
            "acc_norm": 0.40816326530612246,
            "acc_norm_stderr": 0.03146465712827423
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2869198312236287,
            "acc_stderr": 0.02944377302259469,
            "acc_norm": 0.2869198312236287,
            "acc_norm_stderr": 0.02944377302259469
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24902216427640156,
            "acc_stderr": 0.01104489226404077,
            "acc_norm": 0.24902216427640156,
            "acc_norm_stderr": 0.01104489226404077
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.030190282453501933,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.030190282453501933
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2909090909090909,
            "acc_stderr": 0.03546563019624335,
            "acc_norm": 0.2909090909090909,
            "acc_norm_stderr": 0.03546563019624335
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29253365973072215,
            "mc1_stderr": 0.015925597445286165,
            "mc2": 0.4600456246073735,
            "mc2_stderr": 0.014958372484169768
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3600944510035419,
            "acc_stderr": 0.01650368672044008,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.017119172208061504
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8",
        "model_sha": "424602efb3cb7b2c4e901d325113335c002a1da2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}