{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3660409556313993,
"acc_stderr": 0.014077223108470139,
"acc_norm": 0.4180887372013652,
"acc_norm_stderr": 0.014413988396996083
},
"harness|ko_hellaswag|10": {
"acc": 0.403505277833101,
"acc_stderr": 0.0048959776766252395,
"acc_norm": 0.536247759410476,
"acc_norm_stderr": 0.0049766519897576356
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.47953216374269003,
"acc_stderr": 0.0383161053282193,
"acc_norm": 0.47953216374269003,
"acc_norm_stderr": 0.0383161053282193
},
"harness|ko_mmlu_management|5": {
"acc": 0.3883495145631068,
"acc_stderr": 0.0482572933735639,
"acc_norm": 0.3883495145631068,
"acc_norm_stderr": 0.0482572933735639
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4904214559386973,
"acc_stderr": 0.017876682275340873,
"acc_norm": 0.4904214559386973,
"acc_norm_stderr": 0.017876682275340873
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04171654161354543,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04171654161354543
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421255,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421255
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.33617021276595743,
"acc_stderr": 0.030881618520676942,
"acc_norm": 0.33617021276595743,
"acc_norm_stderr": 0.030881618520676942
},
"harness|ko_mmlu_virology|5": {
"acc": 0.41566265060240964,
"acc_stderr": 0.03836722176598053,
"acc_norm": 0.41566265060240964,
"acc_norm_stderr": 0.03836722176598053
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.44694533762057875,
"acc_stderr": 0.028237769422085328,
"acc_norm": 0.44694533762057875,
"acc_norm_stderr": 0.028237769422085328
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3632286995515695,
"acc_stderr": 0.03227790442850499,
"acc_norm": 0.3632286995515695,
"acc_norm_stderr": 0.03227790442850499
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48091603053435117,
"acc_stderr": 0.04382094705550988,
"acc_norm": 0.48091603053435117,
"acc_norm_stderr": 0.04382094705550988
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.494949494949495,
"acc_stderr": 0.035621707606254015,
"acc_norm": 0.494949494949495,
"acc_norm_stderr": 0.035621707606254015
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.38620689655172413,
"acc_stderr": 0.04057324734419035,
"acc_norm": 0.38620689655172413,
"acc_norm_stderr": 0.04057324734419035
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.043364327079931785,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3865546218487395,
"acc_stderr": 0.03163145807552379,
"acc_norm": 0.3865546218487395,
"acc_norm_stderr": 0.03163145807552379
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.36153846153846153,
"acc_stderr": 0.02435958146539696,
"acc_norm": 0.36153846153846153,
"acc_norm_stderr": 0.02435958146539696
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.37962962962962965,
"acc_stderr": 0.04691521224077742,
"acc_norm": 0.37962962962962965,
"acc_norm_stderr": 0.04691521224077742
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3399014778325123,
"acc_stderr": 0.033327690684107895,
"acc_norm": 0.3399014778325123,
"acc_norm_stderr": 0.033327690684107895
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.42258064516129035,
"acc_stderr": 0.02810096472427264,
"acc_norm": 0.42258064516129035,
"acc_norm_stderr": 0.02810096472427264
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.594017094017094,
"acc_stderr": 0.03217180182641086,
"acc_norm": 0.594017094017094,
"acc_norm_stderr": 0.03217180182641086
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4188679245283019,
"acc_stderr": 0.030365050829115208,
"acc_norm": 0.4188679245283019,
"acc_norm_stderr": 0.030365050829115208
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.38181818181818183,
"acc_stderr": 0.046534298079135075,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.046534298079135075
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.26490066225165565,
"acc_stderr": 0.03603038545360385,
"acc_norm": 0.26490066225165565,
"acc_norm_stderr": 0.03603038545360385
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.4626865671641791,
"acc_stderr": 0.03525675167467974,
"acc_norm": 0.4626865671641791,
"acc_norm_stderr": 0.03525675167467974
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3063583815028902,
"acc_stderr": 0.035149425512674366,
"acc_norm": 0.3063583815028902,
"acc_norm_stderr": 0.035149425512674366
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30423280423280424,
"acc_stderr": 0.023695415009463087,
"acc_norm": 0.30423280423280424,
"acc_norm_stderr": 0.023695415009463087
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.039812405437178615,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.039812405437178615
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956913
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.43641618497109824,
"acc_stderr": 0.026700545424943684,
"acc_norm": 0.43641618497109824,
"acc_norm_stderr": 0.026700545424943684
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3496932515337423,
"acc_stderr": 0.03746668325470021,
"acc_norm": 0.3496932515337423,
"acc_norm_stderr": 0.03746668325470021
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4382716049382716,
"acc_stderr": 0.02760791408740046,
"acc_norm": 0.4382716049382716,
"acc_norm_stderr": 0.02760791408740046
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.37305699481865284,
"acc_stderr": 0.034902055920485744,
"acc_norm": 0.37305699481865284,
"acc_norm_stderr": 0.034902055920485744
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512321984,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512321984
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.42568807339449544,
"acc_stderr": 0.021199235972470802,
"acc_norm": 0.42568807339449544,
"acc_norm_stderr": 0.021199235972470802
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04040610178208839,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04040610178208839
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3954248366013072,
"acc_stderr": 0.02799672318063145,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.02799672318063145
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.045454545454545484,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.045454545454545484
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.45394736842105265,
"acc_stderr": 0.04051646342874141,
"acc_norm": 0.45394736842105265,
"acc_norm_stderr": 0.04051646342874141
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.018635594034423983,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.018635594034423983
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3191489361702128,
"acc_stderr": 0.027807990141320214,
"acc_norm": 0.3191489361702128,
"acc_norm_stderr": 0.027807990141320214
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.042466243366976256,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.042466243366976256
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03214952147802749,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802749
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.27941176470588236,
"acc_stderr": 0.027257202606114948,
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.027257202606114948
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3346938775510204,
"acc_stderr": 0.030209235226242307,
"acc_norm": 0.3346938775510204,
"acc_norm_stderr": 0.030209235226242307
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.48523206751054854,
"acc_stderr": 0.032533028078777386,
"acc_norm": 0.48523206751054854,
"acc_norm_stderr": 0.032533028078777386
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.28552803129074317,
"acc_stderr": 0.011535751586665657,
"acc_norm": 0.28552803129074317,
"acc_norm_stderr": 0.011535751586665657
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4019607843137255,
"acc_stderr": 0.034411900234824655,
"acc_norm": 0.4019607843137255,
"acc_norm_stderr": 0.034411900234824655
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.4121212121212121,
"acc_stderr": 0.03843566993588717,
"acc_norm": 0.4121212121212121,
"acc_norm_stderr": 0.03843566993588717
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26560587515299877,
"mc1_stderr": 0.015461027627253602,
"mc2": 0.4382289478351752,
"mc2_stderr": 0.014859555204764835
},
"harness|ko_commongen_v2|2": {
"acc": 0.5434272300469484,
"acc_stderr": 0.017075008217544083,
"acc_norm": 0.6244131455399061,
"acc_norm_stderr": 0.01660070111698995
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "siryuon/KOEN-13B",
"model_sha": "c18bdc67d61099d74c7b77ea46714795082fc698",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}