{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3890784982935154,
"acc_stderr": 0.014247309976045609,
"acc_norm": 0.4496587030716723,
"acc_norm_stderr": 0.01453714444428475
},
"harness|ko_hellaswag|10": {
"acc": 0.4111730730930094,
"acc_stderr": 0.004910409150135485,
"acc_norm": 0.5493925512846046,
"acc_norm_stderr": 0.004965375341643134
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3508771929824561,
"acc_stderr": 0.03660298834049164,
"acc_norm": 0.3508771929824561,
"acc_norm_stderr": 0.03660298834049164
},
"harness|ko_mmlu_management|5": {
"acc": 0.3786407766990291,
"acc_stderr": 0.048026946982589726,
"acc_norm": 0.3786407766990291,
"acc_norm_stderr": 0.048026946982589726
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3537675606641124,
"acc_stderr": 0.01709818470816191,
"acc_norm": 0.3537675606641124,
"acc_norm_stderr": 0.01709818470816191
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996793,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996793
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2553191489361702,
"acc_stderr": 0.02850485647051418,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.02850485647051418
},
"harness|ko_mmlu_virology|5": {
"acc": 0.30120481927710846,
"acc_stderr": 0.0357160923005348,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.0357160923005348
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.40514469453376206,
"acc_stderr": 0.027882383791325956,
"acc_norm": 0.40514469453376206,
"acc_norm_stderr": 0.027882383791325956
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.31390134529147984,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.31390134529147984,
"acc_norm_stderr": 0.031146796482972465
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.37404580152671757,
"acc_stderr": 0.04243869242230524,
"acc_norm": 0.37404580152671757,
"acc_norm_stderr": 0.04243869242230524
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.03304205087813652,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.03304205087813652
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2896551724137931,
"acc_stderr": 0.03780019230438015,
"acc_norm": 0.2896551724137931,
"acc_norm_stderr": 0.03780019230438015
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.11764705882352941,
"acc_stderr": 0.032059077331445286,
"acc_norm": 0.11764705882352941,
"acc_norm_stderr": 0.032059077331445286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.22268907563025211,
"acc_stderr": 0.02702543349888239,
"acc_norm": 0.22268907563025211,
"acc_norm_stderr": 0.02702543349888239
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.022421273612923714,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923714
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04668408033024931,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024931
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3054187192118227,
"acc_stderr": 0.03240661565868408,
"acc_norm": 0.3054187192118227,
"acc_norm_stderr": 0.03240661565868408
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3387096774193548,
"acc_stderr": 0.026923446059302848,
"acc_norm": 0.3387096774193548,
"acc_norm_stderr": 0.026923446059302848
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.45726495726495725,
"acc_stderr": 0.03263622596380688,
"acc_norm": 0.45726495726495725,
"acc_norm_stderr": 0.03263622596380688
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2830188679245283,
"acc_stderr": 0.027724236492700907,
"acc_norm": 0.2830188679245283,
"acc_norm_stderr": 0.027724236492700907
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.23841059602649006,
"acc_stderr": 0.03479185572599661,
"acc_norm": 0.23841059602649006,
"acc_norm_stderr": 0.03479185572599661
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.40298507462686567,
"acc_stderr": 0.03468343295111126,
"acc_norm": 0.40298507462686567,
"acc_norm_stderr": 0.03468343295111126
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.26011560693641617,
"acc_stderr": 0.033450369167889904,
"acc_norm": 0.26011560693641617,
"acc_norm_stderr": 0.033450369167889904
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2724867724867725,
"acc_stderr": 0.022930973071633356,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.022930973071633356
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.039812405437178615,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.039812405437178615
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.15,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.15,
"acc_norm_stderr": 0.03588702812826371
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709390974,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709390974
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.41329479768786126,
"acc_stderr": 0.02651126136940925,
"acc_norm": 0.41329479768786126,
"acc_norm_stderr": 0.02651126136940925
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3558282208588957,
"acc_stderr": 0.03761521380046734,
"acc_norm": 0.3558282208588957,
"acc_norm_stderr": 0.03761521380046734
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3734567901234568,
"acc_stderr": 0.02691500301138016,
"acc_norm": 0.3734567901234568,
"acc_norm_stderr": 0.02691500301138016
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542126
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.3471502590673575,
"acc_stderr": 0.03435696168361355,
"acc_norm": 0.3471502590673575,
"acc_norm_stderr": 0.03435696168361355
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281336,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281336
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3339449541284404,
"acc_stderr": 0.0202205541967364,
"acc_norm": 0.3339449541284404,
"acc_norm_stderr": 0.0202205541967364
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.03852273364924315,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924315
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.31699346405228757,
"acc_stderr": 0.02664327847450875,
"acc_norm": 0.31699346405228757,
"acc_norm_stderr": 0.02664327847450875
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.512396694214876,
"acc_stderr": 0.04562951548180765,
"acc_norm": 0.512396694214876,
"acc_norm_stderr": 0.04562951548180765
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3157894736842105,
"acc_stderr": 0.037827289808654685,
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.037827289808654685
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.33169934640522875,
"acc_stderr": 0.019047485239360378,
"acc_norm": 0.33169934640522875,
"acc_norm_stderr": 0.019047485239360378
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30851063829787234,
"acc_stderr": 0.02755336616510137,
"acc_norm": 0.30851063829787234,
"acc_norm_stderr": 0.02755336616510137
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.040598672469526885,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.040598672469526885
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.030225226160012407,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.030225226160012407
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24134078212290502,
"acc_stderr": 0.014310999547961443,
"acc_norm": 0.24134078212290502,
"acc_norm_stderr": 0.014310999547961443
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.21691176470588236,
"acc_stderr": 0.025035845227711254,
"acc_norm": 0.21691176470588236,
"acc_norm_stderr": 0.025035845227711254
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4,
"acc_stderr": 0.03136250240935893,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03136250240935893
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.38396624472573837,
"acc_stderr": 0.031658678064106674,
"acc_norm": 0.38396624472573837,
"acc_norm_stderr": 0.031658678064106674
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.29791395045632335,
"acc_stderr": 0.011680717340400031,
"acc_norm": 0.29791395045632335,
"acc_norm_stderr": 0.011680717340400031
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.31862745098039214,
"acc_stderr": 0.0327028718148208,
"acc_norm": 0.31862745098039214,
"acc_norm_stderr": 0.0327028718148208
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.34545454545454546,
"acc_stderr": 0.03713158067481912,
"acc_norm": 0.34545454545454546,
"acc_norm_stderr": 0.03713158067481912
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.29008567931456547,
"mc1_stderr": 0.01588623687420952,
"mc2": 0.44352094267213416,
"mc2_stderr": 0.014982781844107165
},
"harness|ko_commongen_v2|2": {
"acc": 0.323943661971831,
"acc_stderr": 0.016042106970464827,
"acc_norm": 0.43896713615023475,
"acc_norm_stderr": 0.017011608310486013
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v6",
"model_sha": "987860d23201c3c0611a1879baef61d10bfb0b4c",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}