{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3447098976109215,
            "acc_stderr": 0.013888816286782112,
            "acc_norm": 0.4129692832764505,
            "acc_norm_stderr": 0.014388344935398324
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4164509061939853,
            "acc_stderr": 0.004919626380645514,
            "acc_norm": 0.5483967337183828,
            "acc_norm_stderr": 0.004966351835028203
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5321637426900585,
            "acc_stderr": 0.03826882417660368,
            "acc_norm": 0.5321637426900585,
            "acc_norm_stderr": 0.03826882417660368
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5048543689320388,
            "acc_stderr": 0.049505043821289195,
            "acc_norm": 0.5048543689320388,
            "acc_norm_stderr": 0.049505043821289195
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5504469987228607,
            "acc_stderr": 0.017788725283507337,
            "acc_norm": 0.5504469987228607,
            "acc_norm_stderr": 0.017788725283507337
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3659574468085106,
            "acc_stderr": 0.031489558297455304,
            "acc_norm": 0.3659574468085106,
            "acc_norm_stderr": 0.031489558297455304
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.0374005938202932,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.0374005938202932
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4662379421221865,
            "acc_stderr": 0.028333277109562807,
            "acc_norm": 0.4662379421221865,
            "acc_norm_stderr": 0.028333277109562807
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47085201793721976,
            "acc_stderr": 0.03350073248773403,
            "acc_norm": 0.47085201793721976,
            "acc_norm_stderr": 0.03350073248773403
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5190839694656488,
            "acc_stderr": 0.04382094705550988,
            "acc_norm": 0.5190839694656488,
            "acc_norm_stderr": 0.04382094705550988
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.03502975799413007,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.03502975799413007
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808778,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808778
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.032145368597886394,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.032145368597886394
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.43333333333333335,
            "acc_stderr": 0.025124653525885134,
            "acc_norm": 0.43333333333333335,
            "acc_norm_stderr": 0.025124653525885134
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3054187192118227,
            "acc_stderr": 0.032406615658684086,
            "acc_norm": 0.3054187192118227,
            "acc_norm_stderr": 0.032406615658684086
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44516129032258067,
            "acc_stderr": 0.028272410186214906,
            "acc_norm": 0.44516129032258067,
            "acc_norm_stderr": 0.028272410186214906
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.03134250486245402,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.03134250486245402
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.22962962962962963,
            "acc_stderr": 0.025644108639267638,
            "acc_norm": 0.22962962962962963,
            "acc_norm_stderr": 0.025644108639267638
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23178807947019867,
            "acc_stderr": 0.034454062719870546,
            "acc_norm": 0.23178807947019867,
            "acc_norm_stderr": 0.034454062719870546
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5970149253731343,
            "acc_stderr": 0.03468343295111126,
            "acc_norm": 0.5970149253731343,
            "acc_norm_stderr": 0.03468343295111126
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4277456647398844,
            "acc_stderr": 0.03772446857518027,
            "acc_norm": 0.4277456647398844,
            "acc_norm_stderr": 0.03772446857518027
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30423280423280424,
            "acc_stderr": 0.023695415009463087,
            "acc_norm": 0.30423280423280424,
            "acc_norm_stderr": 0.023695415009463087
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.039420826399272135,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.039420826399272135
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542129,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542129
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.026538189104705474,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.026538189104705474
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.43558282208588955,
            "acc_stderr": 0.03895632464138937,
            "acc_norm": 0.43558282208588955,
            "acc_norm_stderr": 0.03895632464138937
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.027648477877413324,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.027648477877413324
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952344,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952344
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.035975244117345775,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.035975244117345775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5761467889908257,
            "acc_stderr": 0.021187263209087523,
            "acc_norm": 0.5761467889908257,
            "acc_norm_stderr": 0.021187263209087523
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04360314860077459,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04360314860077459
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.45751633986928103,
            "acc_stderr": 0.028526383452142635,
            "acc_norm": 0.45751633986928103,
            "acc_norm_stderr": 0.028526383452142635
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.512396694214876,
            "acc_stderr": 0.04562951548180765,
            "acc_norm": 0.512396694214876,
            "acc_norm_stderr": 0.04562951548180765
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4342105263157895,
            "acc_stderr": 0.040335656678483205,
            "acc_norm": 0.4342105263157895,
            "acc_norm_stderr": 0.040335656678483205
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.36764705882352944,
            "acc_stderr": 0.019506291693954854,
            "acc_norm": 0.36764705882352944,
            "acc_norm_stderr": 0.019506291693954854
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3475177304964539,
            "acc_stderr": 0.028406627809590947,
            "acc_norm": 0.3475177304964539,
            "acc_norm_stderr": 0.028406627809590947
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.03309682581119035,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.03309682581119035
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2916201117318436,
            "acc_stderr": 0.015201032512520437,
            "acc_norm": 0.2916201117318436,
            "acc_norm_stderr": 0.015201032512520437
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4227941176470588,
            "acc_stderr": 0.03000856284500348,
            "acc_norm": 0.4227941176470588,
            "acc_norm_stderr": 0.03000856284500348
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37551020408163266,
            "acc_stderr": 0.031001209039894843,
            "acc_norm": 0.37551020408163266,
            "acc_norm_stderr": 0.031001209039894843
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5654008438818565,
            "acc_stderr": 0.03226759995510145,
            "acc_norm": 0.5654008438818565,
            "acc_norm_stderr": 0.03226759995510145
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.318122555410691,
            "acc_stderr": 0.01189540728110412,
            "acc_norm": 0.318122555410691,
            "acc_norm_stderr": 0.01189540728110412
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.030587591351604243,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.030587591351604243
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.23030303030303031,
            "acc_stderr": 0.032876667586034886,
            "acc_norm": 0.23030303030303031,
            "acc_norm_stderr": 0.032876667586034886
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2937576499388005,
            "mc1_stderr": 0.015945068581236618,
            "mc2": 0.4608476284919872,
            "mc2_stderr": 0.0153801623360934
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5737898465171193,
            "acc_stderr": 0.01700212260948926,
            "acc_norm": 0.5879574970484062,
            "acc_norm_stderr": 0.01692227673852836
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "KT-AI/midm-bitext-S-7B-inst-v1",
        "model_sha": "401838023f9ce8b7b3ff260fd1b4f971cd280bc5",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}