{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.34215017064846415,
            "acc_stderr": 0.013864152159177278,
            "acc_norm": 0.38822525597269625,
            "acc_norm_stderr": 0.014241614207414044
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38020314678350925,
            "acc_stderr": 0.004844445265582655,
            "acc_norm": 0.4870543716391157,
            "acc_norm_stderr": 0.004988108663179765
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47368421052631576,
            "acc_stderr": 0.038295098689947286,
            "acc_norm": 0.47368421052631576,
            "acc_norm_stderr": 0.038295098689947286
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458934,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458934
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4725415070242657,
            "acc_stderr": 0.01785298126663396,
            "acc_norm": 0.4725415070242657,
            "acc_norm_stderr": 0.01785298126663396
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3702127659574468,
            "acc_stderr": 0.03156564682236785,
            "acc_norm": 0.3702127659574468,
            "acc_norm_stderr": 0.03156564682236785
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.45980707395498394,
            "acc_stderr": 0.028306190403305696,
            "acc_norm": 0.45980707395498394,
            "acc_norm_stderr": 0.028306190403305696
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.49327354260089684,
            "acc_stderr": 0.03355476596234354,
            "acc_norm": 0.49327354260089684,
            "acc_norm_stderr": 0.03355476596234354
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.03502975799413007,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.03502975799413007
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.46206896551724136,
            "acc_stderr": 0.041546596717075474,
            "acc_norm": 0.46206896551724136,
            "acc_norm_stderr": 0.041546596717075474
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.043898699568087785,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.043898699568087785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4461538461538462,
            "acc_stderr": 0.025203571773028333,
            "acc_norm": 0.4461538461538462,
            "acc_norm_stderr": 0.025203571773028333
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411021,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411021
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.37438423645320196,
            "acc_stderr": 0.03405155380561952,
            "acc_norm": 0.37438423645320196,
            "acc_norm_stderr": 0.03405155380561952
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4290322580645161,
            "acc_stderr": 0.02815603653823321,
            "acc_norm": 0.4290322580645161,
            "acc_norm_stderr": 0.02815603653823321
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7350427350427351,
            "acc_stderr": 0.028911208802749472,
            "acc_norm": 0.7350427350427351,
            "acc_norm_stderr": 0.028911208802749472
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4528301886792453,
            "acc_stderr": 0.030635627957961827,
            "acc_norm": 0.4528301886792453,
            "acc_norm_stderr": 0.030635627957961827
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.337037037037037,
            "acc_stderr": 0.028820884666253255,
            "acc_norm": 0.337037037037037,
            "acc_norm_stderr": 0.028820884666253255
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33774834437086093,
            "acc_stderr": 0.038615575462551684,
            "acc_norm": 0.33774834437086093,
            "acc_norm_stderr": 0.038615575462551684
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6368159203980099,
            "acc_stderr": 0.034005985055990146,
            "acc_norm": 0.6368159203980099,
            "acc_norm_stderr": 0.034005985055990146
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.035995863012470784,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.035995863012470784
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3835978835978836,
            "acc_stderr": 0.025043757318520196,
            "acc_norm": 0.3835978835978836,
            "acc_norm_stderr": 0.025043757318520196
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3958333333333333,
            "acc_stderr": 0.040894654493255835,
            "acc_norm": 0.3958333333333333,
            "acc_norm_stderr": 0.040894654493255835
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.56,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5202312138728323,
            "acc_stderr": 0.026897049996382868,
            "acc_norm": 0.5202312138728323,
            "acc_norm_stderr": 0.026897049996382868
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5521472392638037,
            "acc_stderr": 0.03906947479456607,
            "acc_norm": 0.5521472392638037,
            "acc_norm_stderr": 0.03906947479456607
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5123456790123457,
            "acc_stderr": 0.027812262269327235,
            "acc_norm": 0.5123456790123457,
            "acc_norm_stderr": 0.027812262269327235
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5025906735751295,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.5025906735751295,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.04339138322579861,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.04339138322579861
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5137614678899083,
            "acc_stderr": 0.02142920208987408,
            "acc_norm": 0.5137614678899083,
            "acc_norm_stderr": 0.02142920208987408
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.04343525428949097,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.04343525428949097
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.49673202614379086,
            "acc_stderr": 0.02862930519400354,
            "acc_norm": 0.49673202614379086,
            "acc_norm_stderr": 0.02862930519400354
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6528925619834711,
            "acc_stderr": 0.04345724570292534,
            "acc_norm": 0.6528925619834711,
            "acc_norm_stderr": 0.04345724570292534
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.039777499346220734,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.039777499346220734
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.01994491413687358,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.01994491413687358
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.375886524822695,
            "acc_stderr": 0.028893955412115886,
            "acc_norm": 0.375886524822695,
            "acc_norm_stderr": 0.028893955412115886
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.45535714285714285,
            "acc_stderr": 0.04726835553719099,
            "acc_norm": 0.45535714285714285,
            "acc_norm_stderr": 0.04726835553719099
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.032757734861009996,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.032757734861009996
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.20446927374301677,
            "acc_stderr": 0.013488813404711909,
            "acc_norm": 0.20446927374301677,
            "acc_norm_stderr": 0.013488813404711909
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3492647058823529,
            "acc_stderr": 0.028959755196824873,
            "acc_norm": 0.3492647058823529,
            "acc_norm_stderr": 0.028959755196824873
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5265306122448979,
            "acc_stderr": 0.03196412734523272,
            "acc_norm": 0.5265306122448979,
            "acc_norm_stderr": 0.03196412734523272
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5780590717299579,
            "acc_stderr": 0.032148146302403695,
            "acc_norm": 0.5780590717299579,
            "acc_norm_stderr": 0.032148146302403695
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3559322033898305,
            "acc_stderr": 0.01222864553727757,
            "acc_norm": 0.3559322033898305,
            "acc_norm_stderr": 0.01222864553727757
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.03503235296367993,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.03503235296367993
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.49696969696969695,
            "acc_stderr": 0.03904272341431857,
            "acc_norm": 0.49696969696969695,
            "acc_norm_stderr": 0.03904272341431857
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.31334149326805383,
            "mc1_stderr": 0.016238065069059615,
            "mc2": 0.49276821876862364,
            "mc2_stderr": 0.015815875390844718
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.44037780401416765,
            "acc_stderr": 0.01706769977431297,
            "acc_norm": 0.4734356552538371,
            "acc_norm_stderr": 0.017166075717577747
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCKim/Mistral-7B-OpenHermes",
        "model_sha": "847254b43b055cbe217b7aedf1219942457aa942",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}