{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.34812286689419797,
            "acc_stderr": 0.013921008595179344,
            "acc_norm": 0.39078498293515357,
            "acc_norm_stderr": 0.014258563880513778
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.37970523800039835,
            "acc_stderr": 0.0048432163250902655,
            "acc_norm": 0.4901414060944035,
            "acc_norm_stderr": 0.004988811384747425
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49122807017543857,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.49122807017543857,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.04897957737781168,
            "acc_norm": 0.5728155339805825,
            "acc_norm_stderr": 0.04897957737781168
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4725415070242657,
            "acc_stderr": 0.017852981266633955,
            "acc_norm": 0.4725415070242657,
            "acc_norm_stderr": 0.017852981266633955
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.04266763404099582,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.04266763404099582
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.03196758697835362,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.03196758697835362
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.41566265060240964,
            "acc_stderr": 0.03836722176598052,
            "acc_norm": 0.41566265060240964,
            "acc_norm_stderr": 0.03836722176598052
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4662379421221865,
            "acc_stderr": 0.028333277109562786,
            "acc_norm": 0.4662379421221865,
            "acc_norm_stderr": 0.028333277109562786
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.452914798206278,
            "acc_stderr": 0.03340867501923325,
            "acc_norm": 0.452914798206278,
            "acc_norm_stderr": 0.03340867501923325
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.46564885496183206,
            "acc_stderr": 0.04374928560599738,
            "acc_norm": 0.46564885496183206,
            "acc_norm_stderr": 0.04374928560599738
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.035402943770953675,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.035402943770953675
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4689655172413793,
            "acc_stderr": 0.04158632762097828,
            "acc_norm": 0.4689655172413793,
            "acc_norm_stderr": 0.04158632762097828
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.04280105837364395,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.04280105837364395
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5168067226890757,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.5168067226890757,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4641025641025641,
            "acc_stderr": 0.025285585990017834,
            "acc_norm": 0.4641025641025641,
            "acc_norm_stderr": 0.025285585990017834
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.03481904844438803,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.03481904844438803
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45806451612903226,
            "acc_stderr": 0.028343787250540636,
            "acc_norm": 0.45806451612903226,
            "acc_norm_stderr": 0.028343787250540636
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7307692307692307,
            "acc_stderr": 0.029058588303748845,
            "acc_norm": 0.7307692307692307,
            "acc_norm_stderr": 0.029058588303748845
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.44528301886792454,
            "acc_stderr": 0.030588052974270655,
            "acc_norm": 0.44528301886792454,
            "acc_norm_stderr": 0.030588052974270655
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5363636363636364,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.5363636363636364,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.028742040903948485,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.028742040903948485
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6119402985074627,
            "acc_stderr": 0.03445789964362749,
            "acc_norm": 0.6119402985074627,
            "acc_norm_stderr": 0.03445789964362749
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3583815028901734,
            "acc_stderr": 0.036563436533531585,
            "acc_norm": 0.3583815028901734,
            "acc_norm_stderr": 0.036563436533531585
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.35978835978835977,
            "acc_stderr": 0.024718075944129274,
            "acc_norm": 0.35978835978835977,
            "acc_norm_stderr": 0.024718075944129274
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4097222222222222,
            "acc_stderr": 0.04112490974670787,
            "acc_norm": 0.4097222222222222,
            "acc_norm_stderr": 0.04112490974670787
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562426,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562426
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5317919075144508,
            "acc_stderr": 0.02686462436675666,
            "acc_norm": 0.5317919075144508,
            "acc_norm_stderr": 0.02686462436675666
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.42901234567901236,
            "acc_stderr": 0.027538925613470867,
            "acc_norm": 0.42901234567901236,
            "acc_norm_stderr": 0.027538925613470867
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5233160621761658,
            "acc_stderr": 0.03604513672442202,
            "acc_norm": 0.5233160621761658,
            "acc_norm_stderr": 0.03604513672442202
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.043391383225798594,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.043391383225798594
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4917431192660551,
            "acc_stderr": 0.021434399918214334,
            "acc_norm": 0.4917431192660551,
            "acc_norm_stderr": 0.021434399918214334
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42063492063492064,
            "acc_stderr": 0.04415438226743744,
            "acc_norm": 0.42063492063492064,
            "acc_norm_stderr": 0.04415438226743744
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.48366013071895425,
            "acc_stderr": 0.028614624752805407,
            "acc_norm": 0.48366013071895425,
            "acc_norm_stderr": 0.028614624752805407
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7603305785123967,
            "acc_stderr": 0.03896878985070417,
            "acc_norm": 0.7603305785123967,
            "acc_norm_stderr": 0.03896878985070417
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4150326797385621,
            "acc_stderr": 0.01993362777685742,
            "acc_norm": 0.4150326797385621,
            "acc_norm_stderr": 0.01993362777685742
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.34397163120567376,
            "acc_stderr": 0.028338017428611317,
            "acc_norm": 0.34397163120567376,
            "acc_norm_stderr": 0.028338017428611317
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.41964285714285715,
            "acc_stderr": 0.04684099321077106,
            "acc_norm": 0.41964285714285715,
            "acc_norm_stderr": 0.04684099321077106
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.03350991604696043,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.03350991604696043
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3039106145251397,
            "acc_stderr": 0.015382845587584518,
            "acc_norm": 0.3039106145251397,
            "acc_norm_stderr": 0.015382845587584518
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3492647058823529,
            "acc_stderr": 0.028959755196824866,
            "acc_norm": 0.3492647058823529,
            "acc_norm_stderr": 0.028959755196824866
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5673469387755102,
            "acc_stderr": 0.031717528240626645,
            "acc_norm": 0.5673469387755102,
            "acc_norm_stderr": 0.031717528240626645
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5569620253164557,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.5569620253164557,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.32790091264667537,
            "acc_stderr": 0.011989936640666544,
            "acc_norm": 0.32790091264667537,
            "acc_norm_stderr": 0.011989936640666544
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.03465868196380758,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.03465868196380758
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.03888176921674098,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.03888176921674098
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26438188494492043,
            "mc1_stderr": 0.015438211119522512,
            "mc2": 0.43306568977437526,
            "mc2_stderr": 0.015345933860590263
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4935064935064935,
            "acc_stderr": 0.01718890435907731,
            "acc_norm": 0.5348288075560803,
            "acc_norm_stderr": 0.017148598015747425
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "BM-K/mistral-ko-7b-it-v2.0.0",
        "model_sha": "344b5f989128dc9f7a1bb3a1e8bbfe8b50a4159d",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}