{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.33361774744027306, "acc_stderr": 0.013778687054176541, "acc_norm": 0.3873720136518771, "acc_norm_stderr": 0.014235872487909876 }, "harness|ko_hellaswag|10": { "acc": 0.37064329814777935, "acc_stderr": 0.0048198999453424925, "acc_norm": 0.4827723561043617, "acc_norm_stderr": 0.004986818680313441 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.038316105328219316, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.5922330097087378, "acc_stderr": 0.04865777570410768, "acc_norm": 0.5922330097087378, "acc_norm_stderr": 0.04865777570410768 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.48148148148148145, "acc_stderr": 0.017867695938429778, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.017867695938429778 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400352, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400352 }, "harness|ko_mmlu_virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4983922829581994, "acc_stderr": 0.02839794490780661, "acc_norm": 0.4983922829581994, "acc_norm_stderr": 0.02839794490780661 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4977578475336323, "acc_stderr": 0.03355746535223263, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.03355746535223263 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03521224908841585, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03521224908841585 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4957983193277311, "acc_stderr": 0.0324773433444811, "acc_norm": 0.4957983193277311, "acc_norm_stderr": 0.0324773433444811 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.47692307692307695, "acc_stderr": 0.025323990861736125, "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.025323990861736125 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, 
"acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.03438157967036545, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.03438157967036545 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47096774193548385, "acc_stderr": 0.028396016402761008, "acc_norm": 0.47096774193548385, "acc_norm_stderr": 0.028396016402761008 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7393162393162394, "acc_stderr": 0.02876034895652341, "acc_norm": 0.7393162393162394, "acc_norm_stderr": 0.02876034895652341 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.03076213487450049, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.03076213487450049 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3925925925925926, "acc_stderr": 0.02977384701253297, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.02977384701253297 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.03479185572599661, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.03479185572599661 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6119402985074627, "acc_stderr": 0.03445789964362749, "acc_norm": 0.6119402985074627, "acc_norm_stderr": 0.03445789964362749 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3583815028901734, "acc_stderr": 0.036563436533531585, "acc_norm": 0.3583815028901734, "acc_norm_stderr": 0.036563436533531585 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3862433862433862, "acc_stderr": 0.025075981767601688, "acc_norm": 0.3862433862433862, "acc_norm_stderr": 0.025075981767601688 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.038990736873573344, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.038990736873573344 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4913294797687861, "acc_stderr": 0.02691504735536981, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.02691504735536981 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44785276073619634, "acc_stderr": 0.03906947479456601, "acc_norm": 0.44785276073619634, "acc_norm_stderr": 0.03906947479456601 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.47530864197530864, "acc_stderr": 0.02778680093142745, "acc_norm": 0.47530864197530864, "acc_norm_stderr": 0.02778680093142745 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5025906735751295, "acc_stderr": 0.03608390745384487, "acc_norm": 0.5025906735751295, "acc_norm_stderr": 0.03608390745384487 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47706422018348627, "acc_stderr": 0.021414757058175502, "acc_norm": 0.47706422018348627, "acc_norm_stderr": 0.021414757058175502 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.48366013071895425, "acc_stderr": 0.028614624752805407, "acc_norm": 0.48366013071895425, "acc_norm_stderr": 0.028614624752805407 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6694214876033058, "acc_stderr": 0.04294340845212094, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.04294340845212094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04046336883978251, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04046336883978251 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3872549019607843, "acc_stderr": 0.019706875804085627, "acc_norm": 0.3872549019607843, "acc_norm_stderr": 0.019706875804085627 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.028338017428611334, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.028338017428611334 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.03293377139415191, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.03293377139415191 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2111731843575419, "acc_stderr": 0.013650276794312199, "acc_norm": 0.2111731843575419, "acc_norm_stderr": 0.013650276794312199 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35661764705882354, "acc_stderr": 0.02909720956841195, "acc_norm": 0.35661764705882354, "acc_norm_stderr": 0.02909720956841195 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5469387755102041, "acc_stderr": 0.031867859300041275, "acc_norm": 0.5469387755102041, "acc_norm_stderr": 0.031867859300041275 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5864978902953587, "acc_stderr": 0.03205649904851858, "acc_norm": 0.5864978902953587, "acc_norm_stderr": 0.03205649904851858 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3305084745762712, "acc_stderr": 0.012014142101842982, "acc_norm": 0.3305084745762712, "acc_norm_stderr": 0.012014142101842982 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.45098039215686275, "acc_stderr": 0.03492406104163613, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.03492406104163613 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.48484848484848486, "acc_stderr": 0.03902551007374449, "acc_norm": 0.48484848484848486, "acc_norm_stderr": 0.03902551007374449 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3182374541003672, "mc1_stderr": 0.01630598864892059, "mc2": 0.48659504962443784, "mc2_stderr": 0.015814727827810682 }, "harness|ko_commongen_v2|2": { "acc": 0.4380165289256198, "acc_stderr": 0.017057753702160287, "acc_norm": 0.4722550177095632, "acc_norm_stderr": 0.01716386797945601 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "BAAI/Infinity-Instruct-3M-0613-Mistral-7B", "model_sha": "d8ffa6d11ef4ef2e6441326383b3d857591d822e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }