diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 28df5f900b358436f0267334b3e3e9af33f917ba..0000000000000000000000000000000000000000 --- a/.gitattributes +++ /dev/null @@ -1,55 +0,0 @@ -*.7z filter=lfs diff=lfs merge=lfs -text -*.arrow filter=lfs diff=lfs merge=lfs -text -*.bin filter=lfs diff=lfs merge=lfs -text -*.bz2 filter=lfs diff=lfs merge=lfs -text -*.ckpt filter=lfs diff=lfs merge=lfs -text -*.ftz filter=lfs diff=lfs merge=lfs -text -*.gz filter=lfs diff=lfs merge=lfs -text -*.h5 filter=lfs diff=lfs merge=lfs -text -*.joblib filter=lfs diff=lfs merge=lfs -text -*.lfs.* filter=lfs diff=lfs merge=lfs -text -*.lz4 filter=lfs diff=lfs merge=lfs -text -*.mlmodel filter=lfs diff=lfs merge=lfs -text -*.model filter=lfs diff=lfs merge=lfs -text -*.msgpack filter=lfs diff=lfs merge=lfs -text -*.npy filter=lfs diff=lfs merge=lfs -text -*.npz filter=lfs diff=lfs merge=lfs -text -*.onnx filter=lfs diff=lfs merge=lfs -text -*.ot filter=lfs diff=lfs merge=lfs -text -*.parquet filter=lfs diff=lfs merge=lfs -text -*.pb filter=lfs diff=lfs merge=lfs -text -*.pickle filter=lfs diff=lfs merge=lfs -text -*.pkl filter=lfs diff=lfs merge=lfs -text -*.pt filter=lfs diff=lfs merge=lfs -text -*.pth filter=lfs diff=lfs merge=lfs -text -*.rar filter=lfs diff=lfs merge=lfs -text -*.safetensors filter=lfs diff=lfs merge=lfs -text -saved_model/**/* filter=lfs diff=lfs merge=lfs -text -*.tar.* filter=lfs diff=lfs merge=lfs -text -*.tar filter=lfs diff=lfs merge=lfs -text -*.tflite filter=lfs diff=lfs merge=lfs -text -*.tgz filter=lfs diff=lfs merge=lfs -text -*.wasm filter=lfs diff=lfs merge=lfs -text -*.xz filter=lfs diff=lfs merge=lfs -text -*.zip filter=lfs diff=lfs merge=lfs -text -*.zst filter=lfs diff=lfs merge=lfs -text -*tfevents* filter=lfs diff=lfs merge=lfs -text -# Audio files - uncompressed -*.pcm filter=lfs diff=lfs merge=lfs -text -*.sam filter=lfs diff=lfs merge=lfs -text -*.raw filter=lfs diff=lfs merge=lfs -text -# Audio files - compressed -*.aac filter=lfs diff=lfs merge=lfs -text -*.flac filter=lfs diff=lfs merge=lfs -text -*.mp3 filter=lfs diff=lfs merge=lfs -text -*.ogg filter=lfs diff=lfs merge=lfs -text -*.wav filter=lfs diff=lfs merge=lfs -text -# Image files - uncompressed -*.bmp filter=lfs diff=lfs merge=lfs -text -*.gif filter=lfs diff=lfs merge=lfs -text -*.png filter=lfs diff=lfs merge=lfs -text -*.tiff filter=lfs diff=lfs merge=lfs -text -# Image files - compressed -*.jpg filter=lfs diff=lfs merge=lfs -text -*.jpeg filter=lfs diff=lfs merge=lfs -text -*.webp filter=lfs diff=lfs merge=lfs -text diff --git a/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json b/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json deleted file mode 100644 index 21bb4290886674f31b01f905514b05d1cc441b00..0000000000000000000000000000000000000000 --- a/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.39505119453924914, - "acc_stderr": 0.014285898292938165, - "acc_norm": 0.4445392491467577, - "acc_norm_stderr": 0.014521226405627077 - }, - "harness|ko_hellaswag|10": { - "acc": 0.41545508862776337, - "acc_stderr": 0.004917931778593191, - "acc_norm": 0.5571599283011353, - "acc_norm_stderr": 0.004957068377516512 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47953216374269003, - "acc_stderr": 0.0383161053282193, - "acc_norm": 0.47953216374269003, - "acc_norm_stderr": 0.0383161053282193 - }, - 
"harness|ko_mmlu_management|5": { - "acc": 0.4174757281553398, - "acc_stderr": 0.048828405482122375, - "acc_norm": 0.4174757281553398, - "acc_norm_stderr": 0.048828405482122375 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5108556832694764, - "acc_stderr": 0.017875748840242407, - "acc_norm": 0.5108556832694764, - "acc_norm_stderr": 0.017875748840242407 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4222222222222222, - "acc_stderr": 0.04266763404099582, - "acc_norm": 0.4222222222222222, - "acc_norm_stderr": 0.04266763404099582 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.03057944277361033, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.03057944277361033 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.41566265060240964, - "acc_stderr": 0.03836722176598053, - "acc_norm": 0.41566265060240964, - "acc_norm_stderr": 0.03836722176598053 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4758842443729904, - "acc_stderr": 0.02836504154256457, - "acc_norm": 0.4758842443729904, - "acc_norm_stderr": 0.02836504154256457 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.03259625118416828, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.03259625118416828 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.043749285605997376, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.043749285605997376 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4696969696969697, - "acc_stderr": 0.03555804051763929, - "acc_norm": 0.4696969696969697, - "acc_norm_stderr": 0.03555804051763929 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.04093793981266237, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.04093793981266237 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.36134453781512604, - "acc_stderr": 0.031204691225150013, - "acc_norm": 0.36134453781512604, - "acc_norm_stderr": 0.031204691225150013 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.02428314052946728, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.02428314052946728 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03344283744280459, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03344283744280459 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4483870967741935, - 
"acc_stderr": 0.028292056830112735, - "acc_norm": 0.4483870967741935, - "acc_norm_stderr": 0.028292056830112735 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6068376068376068, - "acc_stderr": 0.03199957924651047, - "acc_norm": 0.6068376068376068, - "acc_norm_stderr": 0.03199957924651047 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.43018867924528303, - "acc_stderr": 0.030471445867183238, - "acc_norm": 0.43018867924528303, - "acc_norm_stderr": 0.030471445867183238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.026466117538959916, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.026466117538959916 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5124378109452736, - "acc_stderr": 0.0353443984853958, - "acc_norm": 0.5124378109452736, - "acc_norm_stderr": 0.0353443984853958 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.03656343653353159, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.03656343653353159 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30687830687830686, - "acc_stderr": 0.023752928712112126, - "acc_norm": 0.30687830687830686, - "acc_norm_stderr": 0.023752928712112126 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.02675625512966377, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.02675625512966377 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3558282208588957, - "acc_stderr": 0.03761521380046734, - "acc_norm": 0.3558282208588957, - "acc_norm_stderr": 0.03761521380046734 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.44135802469135804, - "acc_stderr": 0.027628737155668777, - "acc_norm": 0.44135802469135804, - "acc_norm_stderr": 0.027628737155668777 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.0354150857888402, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.0354150857888402 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4091743119266055, - "acc_stderr": 0.02108067026443373, - "acc_norm": 0.4091743119266055, - "acc_norm_stderr": 0.02108067026443373 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 
0.03567016675276863, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276863 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.02803609227389177, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.02803609227389177 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.04465869780531009, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.04465869780531009 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.040335656678483184, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.040335656678483184 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.0190709855896875, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.0190709855896875 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320207, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320207 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.19642857142857142, - "acc_stderr": 0.03770970049347019, - "acc_norm": 0.19642857142857142, - "acc_norm_stderr": 0.03770970049347019 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.02971127586000534, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.02971127586000534 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22426470588235295, - "acc_stderr": 0.02533684856333237, - "acc_norm": 0.22426470588235295, - "acc_norm_stderr": 0.02533684856333237 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4163265306122449, - "acc_stderr": 0.03155782816556164, - "acc_norm": 0.4163265306122449, - "acc_norm_stderr": 0.03155782816556164 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.41350210970464135, - "acc_stderr": 0.03205649904851859, - "acc_norm": 0.41350210970464135, - "acc_norm_stderr": 0.03205649904851859 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29335071707953064, - "acc_stderr": 0.011628520449582076, - "acc_norm": 0.29335071707953064, - "acc_norm_stderr": 0.011628520449582076 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.033744993563193555, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.033744993563193555 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.03872592983524754, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.03872592983524754 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2962056303549572, - "mc1_stderr": 0.01598359510181139, - "mc2": 0.4602391231259313, - "mc2_stderr": 0.015191570633369808 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4765258215962441, - "acc_stderr": 
0.017120879527725653, - "acc_norm": 0.5258215962441315, - "acc_norm_stderr": 0.017116907933735905 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/GenAI-llama-2-ko-en-instruct-v1", - "model_sha": "aee07500d61a1d5d214cf0bc0040650957cf3da0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json b/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json deleted file mode 100644 index f7e10277ae67cadffc5a5c4d21637448ba9bfb9f..0000000000000000000000000000000000000000 --- a/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - 
"harness|ko_arc_challenge|25": { - "acc": 0.3848122866894198, - "acc_stderr": 0.014218371065251095, - "acc_norm": 0.4402730375426621, - "acc_norm_stderr": 0.014506769524804243 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4190400318661621, - "acc_stderr": 0.0049239357498424945, - "acc_norm": 0.5560645289782912, - "acc_norm_stderr": 0.004958314114266494 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5614035087719298, - "acc_stderr": 0.038057975055904594, - "acc_norm": 0.5614035087719298, - "acc_norm_stderr": 0.038057975055904594 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.6019417475728155, - "acc_stderr": 0.04846748253977238, - "acc_norm": 0.6019417475728155, - "acc_norm_stderr": 0.04846748253977238 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5287356321839081, - "acc_stderr": 0.017850410794380173, - "acc_norm": 0.5287356321839081, - "acc_norm_stderr": 0.017850410794380173 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4222222222222222, - "acc_stderr": 0.04266763404099582, - "acc_norm": 0.4222222222222222, - "acc_norm_stderr": 0.04266763404099582 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.030976692998534443, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.030976692998534443 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39759036144578314, - "acc_stderr": 0.038099730845402184, - "acc_norm": 0.39759036144578314, - "acc_norm_stderr": 0.038099730845402184 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5273311897106109, - "acc_stderr": 0.028355633568328188, - "acc_norm": 0.5273311897106109, - "acc_norm_stderr": 0.028355633568328188 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4798206278026906, - "acc_stderr": 0.033530461674123, - "acc_norm": 0.4798206278026906, - "acc_norm_stderr": 0.033530461674123 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5114503816793893, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.5114503816793893, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5353535353535354, - "acc_stderr": 0.03553436368828061, - "acc_norm": 0.5353535353535354, - "acc_norm_stderr": 0.03553436368828061 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4827586206896552, - "acc_stderr": 0.04164188720169377, - "acc_norm": 0.4827586206896552, - "acc_norm_stderr": 0.04164188720169377 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171452, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171452 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.49159663865546216, - "acc_stderr": 0.03247390276569669, - "acc_norm": 0.49159663865546216, - "acc_norm_stderr": 0.03247390276569669 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.46153846153846156, - "acc_stderr": 0.025275892070240634, - "acc_norm": 0.46153846153846156, - "acc_norm_stderr": 0.025275892070240634 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - 
"acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5277777777777778, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.5277777777777778, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3399014778325123, - "acc_stderr": 0.033327690684107895, - "acc_norm": 0.3399014778325123, - "acc_norm_stderr": 0.033327690684107895 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4806451612903226, - "acc_stderr": 0.0284226874043121, - "acc_norm": 0.4806451612903226, - "acc_norm_stderr": 0.0284226874043121 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6752136752136753, - "acc_stderr": 0.03067902276549883, - "acc_norm": 0.6752136752136753, - "acc_norm_stderr": 0.03067902276549883 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.47547169811320755, - "acc_stderr": 0.030735822206205615, - "acc_norm": 0.47547169811320755, - "acc_norm_stderr": 0.030735822206205615 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.04769300568972745, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.04769300568972745 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.027309140588230172, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.027309140588230172 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5671641791044776, - "acc_stderr": 0.03503490923673282, - "acc_norm": 0.5671641791044776, - "acc_norm_stderr": 0.03503490923673282 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4393063583815029, - "acc_stderr": 0.037842719328874674, - "acc_norm": 0.4393063583815029, - "acc_norm_stderr": 0.037842719328874674 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.0242785680243077, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.0242785680243077 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.4027777777777778, - "acc_stderr": 0.04101405519842425, - "acc_norm": 0.4027777777777778, - "acc_norm_stderr": 0.04101405519842425 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.63, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.63, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.026917296179149116, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.026917296179149116 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.48466257668711654, - "acc_stderr": 0.039265223787088445, - "acc_norm": 0.48466257668711654, - "acc_norm_stderr": 0.039265223787088445 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.027815973433878014, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.027815973433878014 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5440414507772021, - "acc_stderr": 0.03594413711272436, - "acc_norm": 0.5440414507772021, - 
"acc_norm_stderr": 0.03594413711272436 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5559633027522936, - "acc_stderr": 0.021302621211654518, - "acc_norm": 0.5559633027522936, - "acc_norm_stderr": 0.021302621211654518 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4673202614379085, - "acc_stderr": 0.028568699752225875, - "acc_norm": 0.4673202614379085, - "acc_norm_stderr": 0.028568699752225875 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.7272727272727273, - "acc_stderr": 0.04065578140908705, - "acc_norm": 0.7272727272727273, - "acc_norm_stderr": 0.04065578140908705 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.47368421052631576, - "acc_stderr": 0.04063302731486671, - "acc_norm": 0.47368421052631576, - "acc_norm_stderr": 0.04063302731486671 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3741830065359477, - "acc_stderr": 0.019576953122088833, - "acc_norm": 0.3741830065359477, - "acc_norm_stderr": 0.019576953122088833 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509314, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509314 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.033509916046960436, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.033509916046960436 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.264804469273743, - "acc_stderr": 0.014756906483260664, - "acc_norm": 0.264804469273743, - "acc_norm_stderr": 0.014756906483260664 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.39705882352941174, - "acc_stderr": 0.029722152099280058, - "acc_norm": 0.39705882352941174, - "acc_norm_stderr": 0.029722152099280058 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.49387755102040815, - "acc_stderr": 0.032006820201639086, - "acc_norm": 0.49387755102040815, - "acc_norm_stderr": 0.032006820201639086 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.35071707953063885, - "acc_stderr": 0.012187773370741518, - "acc_norm": 0.35071707953063885, - "acc_norm_stderr": 0.012187773370741518 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4950980392156863, - "acc_stderr": 0.03509143375606786, - "acc_norm": 
0.4950980392156863, - "acc_norm_stderr": 0.03509143375606786 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5696969696969697, - "acc_stderr": 0.03866225962879077, - "acc_norm": 0.5696969696969697, - "acc_norm_stderr": 0.03866225962879077 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.44227632802507094, - "mc2_stderr": 0.015242459306682204 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.431924882629108, - "acc_stderr": 0.01698017709257206, - "acc_norm": 0.5105633802816901, - "acc_norm_stderr": 0.017135953743220793 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b", - "model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - 
"num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-platypus/result_2023-10-13 16:58:44.json b/42MARU/GenAI-llama2-ko-en-platypus/result_2023-10-13 16:58:44.json deleted file mode 100644 index d503432a6be95c5d85212f7fdf0d11879a5257d6..0000000000000000000000000000000000000000 --- a/42MARU/GenAI-llama2-ko-en-platypus/result_2023-10-13 16:58:44.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3839590443686007, - "acc_stderr": 0.01421244498065189, - "acc_norm": 0.4522184300341297, - "acc_norm_stderr": 0.014544519880633832 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4166500697072296, - "acc_stderr": 0.00491996282220832, - "acc_norm": 0.5524795857398924, - "acc_norm_stderr": 0.004962220512548352 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5614035087719298, - "acc_stderr": 0.038057975055904594, - "acc_norm": 0.5614035087719298, - "acc_norm_stderr": 0.038057975055904594 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5351213282247765, - "acc_stderr": 0.017835798806290642, - "acc_norm": 0.5351213282247765, - "acc_norm_stderr": 0.017835798806290642 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.31063829787234043, - "acc_stderr": 0.03025123757921317, - "acc_norm": 0.31063829787234043, - "acc_norm_stderr": 0.03025123757921317 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42168674698795183, - "acc_stderr": 0.038444531817709175, - "acc_norm": 0.42168674698795183, - "acc_norm_stderr": 0.038444531817709175 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5048231511254019, - "acc_stderr": 0.02839677044411129, - "acc_norm": 0.5048231511254019, - "acc_norm_stderr": 0.02839677044411129 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4484304932735426, - "acc_stderr": 0.03337883736255099, - "acc_norm": 0.4484304932735426, - "acc_norm_stderr": 0.03337883736255099 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5267175572519084, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.5267175572519084, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562429, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562429 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.494949494949495, - "acc_stderr": 0.035621707606254015, - "acc_norm": 0.494949494949495, - "acc_norm_stderr": 0.035621707606254015 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.04093793981266236, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.04093793981266236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.0379328118530781, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.0379328118530781 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4957983193277311, - "acc_stderr": 0.03247734334448111, - 
"acc_norm": 0.4957983193277311, - "acc_norm_stderr": 0.03247734334448111 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4230769230769231, - "acc_stderr": 0.025049197876042328, - "acc_norm": 0.4230769230769231, - "acc_norm_stderr": 0.025049197876042328 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760628, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760628 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35960591133004927, - "acc_stderr": 0.03376458246509568, - "acc_norm": 0.35960591133004927, - "acc_norm_stderr": 0.03376458246509568 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.02820622559150275, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.02820622559150275 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.03107502852650775, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.03107502852650775 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4490566037735849, - "acc_stderr": 0.030612730713641095, - "acc_norm": 0.4490566037735849, - "acc_norm_stderr": 0.030612730713641095 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.04724577405731572, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.04724577405731572 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.02578787422095932, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.02578787422095932 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.032979866484738336, - "acc_norm": 0.2052980132450331, - "acc_norm_stderr": 0.032979866484738336 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5771144278606966, - "acc_stderr": 0.034932317774212816, - "acc_norm": 0.5771144278606966, - "acc_norm_stderr": 0.034932317774212816 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.036812296333943194, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.036812296333943194 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194978, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194978 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04122728707651282, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04122728707651282 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.66, - "acc_stderr": 0.04760952285695237, - "acc_norm": 0.66, - "acc_norm_stderr": 0.04760952285695237 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5057803468208093, - "acc_stderr": 0.02691729617914911, - "acc_norm": 0.5057803468208093, - "acc_norm_stderr": 0.02691729617914911 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44785276073619634, - "acc_stderr": 0.03906947479456602, - "acc_norm": 0.44785276073619634, - "acc_norm_stderr": 
0.03906947479456602 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.027777777777777797, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.027777777777777797 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5647668393782384, - "acc_stderr": 0.03578038165008586, - "acc_norm": 0.5647668393782384, - "acc_norm_stderr": 0.03578038165008586 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281335, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281335 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5522935779816514, - "acc_stderr": 0.02131975496242546, - "acc_norm": 0.5522935779816514, - "acc_norm_stderr": 0.02131975496242546 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.040061680838488774, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.040061680838488774 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45751633986928103, - "acc_stderr": 0.02852638345214264, - "acc_norm": 0.45751633986928103, - "acc_norm_stderr": 0.02852638345214264 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6859504132231405, - "acc_stderr": 0.04236964753041018, - "acc_norm": 0.6859504132231405, - "acc_norm_stderr": 0.04236964753041018 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.45394736842105265, - "acc_stderr": 0.04051646342874141, - "acc_norm": 0.45394736842105265, - "acc_norm_stderr": 0.04051646342874141 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3839869281045752, - "acc_stderr": 0.019675808135281525, - "acc_norm": 0.3839869281045752, - "acc_norm_stderr": 0.019675808135281525 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35815602836879434, - "acc_stderr": 0.02860208586275942, - "acc_norm": 0.35815602836879434, - "acc_norm_stderr": 0.02860208586275942 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.042878587513404544, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.042878587513404544 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.02971127586000534, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.02971127586000534 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.293854748603352, - "acc_stderr": 0.015235075776719616, - "acc_norm": 0.293854748603352, - "acc_norm_stderr": 0.015235075776719616 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.02725720260611495, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.02725720260611495 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.031976941187136725, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.031976941187136725 - 
}, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6075949367088608, - "acc_stderr": 0.0317847187456473, - "acc_norm": 0.6075949367088608, - "acc_norm_stderr": 0.0317847187456473 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.33116036505867014, - "acc_stderr": 0.01202012819598576, - "acc_norm": 0.33116036505867014, - "acc_norm_stderr": 0.01202012819598576 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.49019607843137253, - "acc_stderr": 0.03508637358630572, - "acc_norm": 0.49019607843137253, - "acc_norm_stderr": 0.03508637358630572 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.038881769216741004, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.038881769216741004 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4478448789442893, - "mc2_stderr": 0.015296172022310957 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.568075117370892, - "acc_stderr": 0.016980177092572074, - "acc_norm": 0.6197183098591549, - "acc_norm_stderr": 0.01664121729750358 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - 
"harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/GenAI-llama2-ko-en-platypus", - "model_sha": "35093e36712fc3edde865a2ac45e7a8f09c9f514", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json b/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json deleted file mode 100644 index 338a5fc99b05149204a6be596a661676ef59dd06..0000000000000000000000000000000000000000 --- a/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3455631399317406, - "acc_stderr": 0.013896938461145678, - "acc_norm": 0.3839590443686007, - "acc_norm_stderr": 0.01421244498065189 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3975303724357698, - "acc_stderr": 0.004883871774350598, - "acc_norm": 0.5247958573989245, - "acc_norm_stderr": 0.004983641854351152 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.036996580176568775, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.036996580176568775 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3831417624521073, - "acc_stderr": 0.01738477419488563, - "acc_norm": 0.3831417624521073, - "acc_norm_stderr": 0.01738477419488563 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412482, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412482 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3795180722891566, - "acc_stderr": 0.03777798822748017, - "acc_norm": 0.3795180722891566, - "acc_norm_stderr": 0.03777798822748017 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.35691318327974275, - "acc_stderr": 0.027210420375934012, - "acc_norm": 0.35691318327974275, - "acc_norm_stderr": 0.027210420375934012 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4170403587443946, - "acc_stderr": 0.03309266936071721, - "acc_norm": 0.4170403587443946, - "acc_norm_stderr": 0.03309266936071721 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4198473282442748, - "acc_stderr": 0.043285772152629715, - "acc_norm": 0.4198473282442748, - "acc_norm_stderr": 0.043285772152629715 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35858585858585856, - "acc_stderr": 0.034169036403915214, - "acc_norm": 0.35858585858585856, - "acc_norm_stderr": 
0.034169036403915214 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.03618664819936245, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.03618664819936245 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25630252100840334, - "acc_stderr": 0.028359620870533953, - "acc_norm": 0.25630252100840334, - "acc_norm_stderr": 0.028359620870533953 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.24871794871794872, - "acc_stderr": 0.021916957709213803, - "acc_norm": 0.24871794871794872, - "acc_norm_stderr": 0.021916957709213803 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04668408033024932, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04668408033024932 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733545, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733545 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042767, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042767 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.03248577511578401, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.03248577511578401 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425463, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425463 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.026593939101844072, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.026593939101844072 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - "acc_stderr": 0.0347918557259966, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.0347918557259966 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3283582089552239, - "acc_stderr": 0.033206858897443244, - "acc_norm": 0.3283582089552239, - "acc_norm_stderr": 0.033206858897443244 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23121387283236994, - "acc_stderr": 0.0321473730202947, - "acc_norm": 0.23121387283236994, - "acc_norm_stderr": 0.0321473730202947 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184756, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184756 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - 
"acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.33236994219653176, - "acc_stderr": 0.025361168749688225, - "acc_norm": 0.33236994219653176, - "acc_norm_stderr": 0.025361168749688225 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.33641975308641975, - "acc_stderr": 0.026289734945952926, - "acc_norm": 0.33641975308641975, - "acc_norm_stderr": 0.026289734945952926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32642487046632124, - "acc_stderr": 0.033840286211432945, - "acc_norm": 0.32642487046632124, - "acc_norm_stderr": 0.033840286211432945 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.0433913832257986, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.0433913832257986 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3174311926605505, - "acc_stderr": 0.019957152198460497, - "acc_norm": 0.3174311926605505, - "acc_norm_stderr": 0.019957152198460497 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.03395490020856111, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.03395490020856111 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.02736359328468495, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.02736359328468495 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.037385206761196686, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.037385206761196686 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2679738562091503, - "acc_stderr": 0.017917974069594726, - "acc_norm": 0.2679738562091503, - "acc_norm_stderr": 0.017917974069594726 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.02624492034984301, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.02624492034984301 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.33035714285714285, - "acc_stderr": 0.04464285714285712, - "acc_norm": 0.33035714285714285, - "acc_norm_stderr": 0.04464285714285712 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.029157522184605586, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.029157522184605586 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 
0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.0290294228156814, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.0290294228156814 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.028666857790274648, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.028666857790274648 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35864978902953587, - "acc_stderr": 0.031219569445301847, - "acc_norm": 0.35864978902953587, - "acc_norm_stderr": 0.031219569445301847 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27249022164276404, - "acc_stderr": 0.01137165829431153, - "acc_norm": 0.27249022164276404, - "acc_norm_stderr": 0.01137165829431153 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.031145570659486782, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.031145570659486782 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3090909090909091, - "acc_stderr": 0.036085410115739666, - "acc_norm": 0.3090909090909091, - "acc_norm_stderr": 0.036085410115739666 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24969400244798043, - "mc1_stderr": 0.015152286907148125, - "mc2": 0.39805148377575406, - "mc2_stderr": 0.015027401787198838 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.392018779342723, - "acc_stderr": 0.016735309112043194, - "acc_norm": 0.46830985915492956, - "acc_norm_stderr": 0.017105318850828437 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - 
"harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/llama-2-ko-7b-instruct", - "model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json b/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json deleted file mode 100644 index 8f2cfb629e4b7374bb26d1ae29309c20f02a1d49..0000000000000000000000000000000000000000 --- a/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3293515358361775, - "acc_stderr": 0.013734057652635474, - "acc_norm": 0.386518771331058, - "acc_norm_stderr": 0.014230084761910474 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3835889265086636, - "acc_stderr": 0.00485265887677539, - "acc_norm": 0.5022903804023103, - "acc_norm_stderr": 0.004989729059957435 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.391812865497076, - "acc_stderr": 0.03743979825926401, - "acc_norm": 0.391812865497076, - "acc_norm_stderr": 0.03743979825926401 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.04453254836326466, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.04453254836326466 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.367816091954023, - "acc_stderr": 0.01724382889184626, - "acc_norm": 0.367816091954023, - "acc_norm_stderr": 0.01724382889184626 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996795, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996795 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2851063829787234, - "acc_stderr": 0.02951319662553935, - "acc_norm": 0.2851063829787234, - "acc_norm_stderr": 0.02951319662553935 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.03610805018031024, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.03610805018031024 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.027648149599751464, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.027648149599751464 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.43946188340807174, - "acc_stderr": 0.03331092511038179, - "acc_norm": 
0.43946188340807174, - "acc_norm_stderr": 0.03331092511038179 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4351145038167939, - "acc_stderr": 0.04348208051644858, - "acc_norm": 0.4351145038167939, - "acc_norm_stderr": 0.04348208051644858 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.31313131313131315, - "acc_stderr": 0.03304205087813653, - "acc_norm": 0.31313131313131315, - "acc_norm_stderr": 0.03304205087813653 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.039966295748767186, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.039966295748767186 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.031357095996135904, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.031357095996135904 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2794871794871795, - "acc_stderr": 0.022752388839776823, - "acc_norm": 0.2794871794871795, - "acc_norm_stderr": 0.022752388839776823 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.22660098522167488, - "acc_stderr": 0.02945486383529298, - "acc_norm": 0.22660098522167488, - "acc_norm_stderr": 0.02945486383529298 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3225806451612903, - "acc_stderr": 0.026593084516572267, - "acc_norm": 0.3225806451612903, - "acc_norm_stderr": 0.026593084516572267 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5, - "acc_stderr": 0.03275608910402091, - "acc_norm": 0.5, - "acc_norm_stderr": 0.03275608910402091 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3471698113207547, - "acc_stderr": 0.029300101705549652, - "acc_norm": 0.3471698113207547, - "acc_norm_stderr": 0.029300101705549652 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.025928876132766118, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.025928876132766118 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2119205298013245, - "acc_stderr": 0.03336767086567977, - "acc_norm": 0.2119205298013245, - "acc_norm_stderr": 0.03336767086567977 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.39800995024875624, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.39800995024875624, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.03414014007044036, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 
0.03414014007044036 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.022261817692400175, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.022261817692400175 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.03800968060554859, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.03800968060554859 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.38439306358381503, - "acc_stderr": 0.026189666966272035, - "acc_norm": 0.38439306358381503, - "acc_norm_stderr": 0.026189666966272035 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3496932515337423, - "acc_stderr": 0.03746668325470022, - "acc_norm": 0.3496932515337423, - "acc_norm_stderr": 0.03746668325470022 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.026725868809100793, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.026725868809100793 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.35751295336787564, - "acc_stderr": 0.03458816042181005, - "acc_norm": 0.35751295336787564, - "acc_norm_stderr": 0.03458816042181005 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3394495412844037, - "acc_stderr": 0.02030210934266235, - "acc_norm": 0.3394495412844037, - "acc_norm_stderr": 0.02030210934266235 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.039325376803928704, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.039325376803928704 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.027582811415159607, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.027582811415159607 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.019117213911495165, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.019117213911495165 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.027187127011503796, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.027187127011503796 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.04327040932578728, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.04327040932578728 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2824074074074074, - "acc_stderr": 0.030701372111510927, - "acc_norm": 0.2824074074074074, - "acc_norm_stderr": 0.030701372111510927 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25139664804469275, - "acc_stderr": 0.014508979453553977, - "acc_norm": 0.25139664804469275, - "acc_norm_stderr": 0.014508979453553977 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3786764705882353, - "acc_stderr": 0.029465133639776125, - "acc_norm": 0.3786764705882353, - "acc_norm_stderr": 0.029465133639776125 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3510204081632653, - "acc_stderr": 0.03055531675557364, - "acc_norm": 0.3510204081632653, - "acc_norm_stderr": 0.03055531675557364 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4767932489451477, - "acc_stderr": 0.032512152011410174, - "acc_norm": 0.4767932489451477, - "acc_norm_stderr": 0.032512152011410174 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3089960886571056, - "acc_stderr": 0.01180172977723925, - "acc_norm": 0.3089960886571056, - "acc_norm_stderr": 0.01180172977723925 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.032702871814820816, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.032702871814820816 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3212121212121212, - "acc_stderr": 0.0364620496325381, - "acc_norm": 0.3212121212121212, - "acc_norm_stderr": 0.0364620496325381 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25091799265605874, - "mc1_stderr": 0.01517698502770769, - "mc2": 0.38056097212603235, - "mc2_stderr": 0.014936929596682727 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5704225352112676, - "acc_stderr": 0.01696892392010678, - "acc_norm": 0.6549295774647887, - "acc_norm_stderr": 0.016296201644718785 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - 
"harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/llama-2-ko-7b-instruction-v3", - "model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json b/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json deleted file mode 100644 index e01de266dae31204e83f7ae6a4b3353d39102fd2..0000000000000000000000000000000000000000 --- a/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3046075085324232, - "acc_stderr": 0.013449522109932492, - "acc_norm": 0.363481228668942, - "acc_norm_stderr": 0.014056207319068282 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3981278629755029, - "acc_stderr": 0.0048851164655502755, - "acc_norm": 0.5159330810595499, - "acc_norm_stderr": 0.004987247325495624 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.24271844660194175, - "acc_stderr": 0.04245022486384495, - "acc_norm": 0.24271844660194175, - "acc_norm_stderr": 0.04245022486384495 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23754789272030652, - "acc_stderr": 0.015218733046150193, - "acc_norm": 0.23754789272030652, - "acc_norm_stderr": 0.015218733046150193 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.039154506304142495, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.039154506304142495 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2297872340425532, - "acc_stderr": 0.02750175294441242, - "acc_norm": 0.2297872340425532, - "acc_norm_stderr": 
0.02750175294441242 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2469879518072289, - "acc_stderr": 0.03357351982064536, - "acc_norm": 0.2469879518072289, - "acc_norm_stderr": 0.03357351982064536 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33440514469453375, - "acc_stderr": 0.026795422327893944, - "acc_norm": 0.33440514469453375, - "acc_norm_stderr": 0.026795422327893944 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.1031390134529148, - "acc_stderr": 0.020412564289839272, - "acc_norm": 0.1031390134529148, - "acc_norm_stderr": 0.020412564289839272 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.40404040404040403, - "acc_stderr": 0.03496130972056128, - "acc_norm": 0.40404040404040403, - "acc_norm_stderr": 0.03496130972056128 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.03921545312467122, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.03921545312467122 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.04389869956808777, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.04389869956808777 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.18907563025210083, - "acc_stderr": 0.02543511943810536, - "acc_norm": 0.18907563025210083, - "acc_norm_stderr": 0.02543511943810536 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.020932445774463175, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.020932445774463175 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.1921182266009852, - "acc_stderr": 0.027719315709614778, - "acc_norm": 0.1921182266009852, - "acc_norm_stderr": 0.027719315709614778 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25483870967741934, - "acc_stderr": 0.024790118459332204, - "acc_norm": 0.25483870967741934, - "acc_norm_stderr": 0.024790118459332204 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2905982905982906, - "acc_stderr": 0.029745048572674057, - "acc_norm": 0.2905982905982906, - "acc_norm_stderr": 0.029745048572674057 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.29056603773584905, - "acc_stderr": 0.027943219989337156, - "acc_norm": 0.29056603773584905, - "acc_norm_stderr": 0.027943219989337156 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712166, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712166 - 
}, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119995, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119995 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.19402985074626866, - "acc_stderr": 0.027962677604768914, - "acc_norm": 0.19402985074626866, - "acc_norm_stderr": 0.027962677604768914 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.03186209851641143, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.03186209851641143 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.02226181769240018, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.02226181769240018 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403326, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403326 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.024105712607754307, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.024105712607754307 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.036230899157241474, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.036230899157241474 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.023788583551658537, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.023788583551658537 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178253, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178253 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3376146788990826, - "acc_stderr": 0.02027526598663891, - "acc_norm": 0.3376146788990826, - "acc_norm_stderr": 0.02027526598663891 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.038095238095238126, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.038095238095238126 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.025553169991826524, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.025553169991826524 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.03941897526516302, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.03941897526516302 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03459777606810537, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03459777606810537 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.25980392156862747, - "acc_stderr": 0.01774089950917779, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.01774089950917779 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22340425531914893, - "acc_stderr": 0.024847921358063962, - "acc_norm": 0.22340425531914893, - "acc_norm_stderr": 0.024847921358063962 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24537037037037038, - "acc_stderr": 0.029346665094372937, - "acc_norm": 0.24537037037037038, - "acc_norm_stderr": 0.029346665094372937 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3786764705882353, - "acc_stderr": 0.02946513363977613, - "acc_norm": 0.3786764705882353, - "acc_norm_stderr": 0.02946513363977613 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.35918367346938773, - "acc_stderr": 0.03071356045510849, - "acc_norm": 0.35918367346938773, - "acc_norm_stderr": 0.03071356045510849 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24472573839662448, - "acc_stderr": 0.027985699387036416, - "acc_norm": 0.24472573839662448, - "acc_norm_stderr": 0.027985699387036416 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2588005215123859, - "acc_stderr": 0.011186109046564608, - "acc_norm": 0.2588005215123859, - "acc_norm_stderr": 0.011186109046564608 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.031660096793998116, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.031660096793998116 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2, - "acc_stderr": 0.03123475237772118, - "acc_norm": 0.2, - "acc_norm_stderr": 0.03123475237772118 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27906976744186046, - "mc1_stderr": 0.015702107090627887, - "mc2": 0.4515720476496737, - "mc2_stderr": 0.015493161984611252 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5528169014084507, - "acc_stderr": 0.017043883876215398, - "acc_norm": 0.5997652582159625, - "acc_norm_stderr": 0.016795125938543782 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - 
"harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/polyglot-ko-12.8b-instruct", - "model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json b/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json deleted file mode 100644 index ccc211ec5fb005e5338c261dc6bbcc14ddcf2112..0000000000000000000000000000000000000000 --- a/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3643344709897611, - "acc_stderr": 0.014063260279882417, - "acc_norm": 0.4112627986348123, - "acc_norm_stderr": 0.014379441068522084 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3732324238199562, - "acc_stderr": 0.004826746160830189, - "acc_norm": 0.4751045608444533, - "acc_norm_stderr": 0.004983592410934169 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5631067961165048, - "acc_stderr": 0.04911147107365777, - "acc_norm": 0.5631067961165048, - "acc_norm_stderr": 0.04911147107365777 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4878671775223499, - "acc_stderr": 0.017874698667491355, - "acc_norm": 0.4878671775223499, - "acc_norm_stderr": 0.017874698667491355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 
0.4148148148148148, - "acc_stderr": 0.042561937679014075, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.042561937679014075 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4, - "acc_stderr": 0.03202563076101735, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03202563076101735 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.03789134424611548, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.03789134424611548 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4533762057877814, - "acc_stderr": 0.028274359854894245, - "acc_norm": 0.4533762057877814, - "acc_norm_stderr": 0.028274359854894245 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4663677130044843, - "acc_stderr": 0.033481800170603065, - "acc_norm": 0.4663677130044843, - "acc_norm_stderr": 0.033481800170603065 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48091603053435117, - "acc_stderr": 0.04382094705550988, - "acc_norm": 0.48091603053435117, - "acc_norm_stderr": 0.04382094705550988 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488583, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488583 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5909090909090909, - "acc_stderr": 0.03502975799413008, - "acc_norm": 0.5909090909090909, - "acc_norm_stderr": 0.03502975799413008 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4413793103448276, - "acc_stderr": 0.04137931034482758, - "acc_norm": 0.4413793103448276, - "acc_norm_stderr": 0.04137931034482758 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416544, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416544 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4495798319327731, - "acc_stderr": 0.03231293497137707, - "acc_norm": 0.4495798319327731, - "acc_norm_stderr": 0.03231293497137707 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.025141801511177498, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.025141801511177498 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5277777777777778, - "acc_stderr": 0.048262172941398944, - "acc_norm": 0.5277777777777778, - "acc_norm_stderr": 0.048262172941398944 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3645320197044335, - "acc_stderr": 0.0338640574606209, - "acc_norm": 0.3645320197044335, - "acc_norm_stderr": 0.0338640574606209 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47096774193548385, - "acc_stderr": 0.028396016402761005, - "acc_norm": 0.47096774193548385, - "acc_norm_stderr": 0.028396016402761005 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6282051282051282, - "acc_stderr": 0.03166098891888078, - "acc_norm": 0.6282051282051282, - "acc_norm_stderr": 0.03166098891888078 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4528301886792453, - "acc_stderr": 0.030635627957961823, - "acc_norm": 0.4528301886792453, - 
"acc_norm_stderr": 0.030635627957961823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.028226446749683526, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.028226446749683526 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5771144278606966, - "acc_stderr": 0.034932317774212816, - "acc_norm": 0.5771144278606966, - "acc_norm_stderr": 0.034932317774212816 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.024130158299762613, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.024130158299762613 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3819444444444444, - "acc_stderr": 0.040629907841466674, - "acc_norm": 0.3819444444444444, - "acc_norm_stderr": 0.040629907841466674 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.6, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.6, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5144508670520231, - "acc_stderr": 0.02690784985628254, - "acc_norm": 0.5144508670520231, - "acc_norm_stderr": 0.02690784985628254 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.03901591825836184, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.03901591825836184 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.02764847787741332, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.02764847787741332 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.03608003225569654, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.03608003225569654 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.037124548537213684, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.037124548537213684 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47889908256880737, - "acc_stderr": 0.021418224754264643, - "acc_norm": 0.47889908256880737, - "acc_norm_stderr": 0.021418224754264643 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43790849673202614, - "acc_stderr": 0.028408302020332687, - "acc_norm": 0.43790849673202614, - "acc_norm_stderr": 0.028408302020332687 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488584 - }, - 
"harness|ko_mmlu_international_law|5": { - "acc": 0.6859504132231405, - "acc_stderr": 0.042369647530410184, - "acc_norm": 0.6859504132231405, - "acc_norm_stderr": 0.042369647530410184 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.03878139888797609, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.03878139888797609 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.01927099870822398, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.01927099870822398 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3475177304964539, - "acc_stderr": 0.028406627809590947, - "acc_norm": 0.3475177304964539, - "acc_norm_stderr": 0.028406627809590947 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.040073418097558065, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.040073418097558065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3101851851851852, - "acc_stderr": 0.031546962856566295, - "acc_norm": 0.3101851851851852, - "acc_norm_stderr": 0.031546962856566295 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.3106145251396648, - "acc_stderr": 0.015476515438005566, - "acc_norm": 0.3106145251396648, - "acc_norm_stderr": 0.015476515438005566 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.02841820861940679, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.02841820861940679 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5428571428571428, - "acc_stderr": 0.03189141832421396, - "acc_norm": 0.5428571428571428, - "acc_norm_stderr": 0.03189141832421396 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32333767926988266, - "acc_stderr": 0.011946565758447202, - "acc_norm": 0.32333767926988266, - "acc_norm_stderr": 0.011946565758447202 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.47549019607843135, - "acc_stderr": 0.035050931943487976, - "acc_norm": 0.47549019607843135, - "acc_norm_stderr": 0.035050931943487976 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5393939393939394, - "acc_stderr": 0.03892207016552012, - "acc_norm": 0.5393939393939394, - "acc_norm_stderr": 0.03892207016552012 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.35006119951040393, - "mc1_stderr": 0.01669794942015103, - "mc2": 0.5148844380994511, - "mc2_stderr": 0.015947695748354234 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2136150234741784, - "acc_stderr": 0.014049754012186298, - "acc_norm": 0.22769953051643194, - "acc_norm_stderr": 0.014375052416765484 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - 
"harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42MARU/sitebunny-13b", - "model_sha": "67107327d09c2f9bf3e4b316d97767c97f5a0804", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json b/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json deleted file mode 100644 index 283e32a15684ca6a2b0cf5d26bb9769e61d1726c..0000000000000000000000000000000000000000 --- a/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2636518771331058, - "acc_stderr": 0.01287592915129705, - "acc_norm": 0.32593856655290104, - "acc_norm_stderr": 0.013697432466693242 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3563035251941844, - "acc_stderr": 0.004779276329704052, - "acc_norm": 0.4473212507468632, - "acc_norm_stderr": 0.004962010338226348 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.23976608187134502, - "acc_stderr": 0.03274485211946956, - "acc_norm": 0.23976608187134502, - "acc_norm_stderr": 
0.03274485211946956 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.22094508301404853, - "acc_stderr": 0.014836205167333574, - "acc_norm": 0.22094508301404853, - "acc_norm_stderr": 0.014836205167333574 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.18518518518518517, - "acc_stderr": 0.0335567721631314, - "acc_norm": 0.18518518518518517, - "acc_norm_stderr": 0.0335567721631314 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2851063829787234, - "acc_stderr": 0.02951319662553935, - "acc_norm": 0.2851063829787234, - "acc_norm_stderr": 0.02951319662553935 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3072289156626506, - "acc_stderr": 0.03591566797824664, - "acc_norm": 0.3072289156626506, - "acc_norm_stderr": 0.03591566797824664 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2347266881028939, - "acc_stderr": 0.024071805887677045, - "acc_norm": 0.2347266881028939, - "acc_norm_stderr": 0.024071805887677045 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2242152466367713, - "acc_stderr": 0.027991534258519527, - "acc_norm": 0.2242152466367713, - "acc_norm_stderr": 0.027991534258519527 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124484, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124484 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.21379310344827587, - "acc_stderr": 0.034165204477475494, - "acc_norm": 0.21379310344827587, - "acc_norm_stderr": 0.034165204477475494 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.030388353551886835, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.030388353551886835 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.02435958146539698, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.02435958146539698 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.038935425188248475, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.038935425188248475 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 0.031270907132976984, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.031270907132976984 - }, - "harness|ko_mmlu_high_school_biology|5": { - 
"acc": 0.2870967741935484, - "acc_stderr": 0.025736542745594525, - "acc_norm": 0.2870967741935484, - "acc_norm_stderr": 0.025736542745594525 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.20085470085470086, - "acc_stderr": 0.02624677294689047, - "acc_norm": 0.20085470085470086, - "acc_norm_stderr": 0.02624677294689047 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 0.026880647889051985, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889051985 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3509933774834437, - "acc_stderr": 0.03896981964257374, - "acc_norm": 0.3509933774834437, - "acc_norm_stderr": 0.03896981964257374 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2935323383084577, - "acc_stderr": 0.03220024104534205, - "acc_norm": 0.2935323383084577, - "acc_norm_stderr": 0.03220024104534205 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.03345036916788991, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.03345036916788991 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24338624338624337, - "acc_stderr": 0.022101128787415426, - "acc_norm": 0.24338624338624337, - "acc_norm_stderr": 0.022101128787415426 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.022497230190967547, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.022497230190967547 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.025171041915309684, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.025171041915309684 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.35751295336787564, - "acc_stderr": 0.03458816042181006, - "acc_norm": 0.35751295336787564, - "acc_norm_stderr": 0.03458816042181006 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24954128440366974, - "acc_stderr": 0.018553897629501614, - "acc_norm": 0.24954128440366974, - "acc_norm_stderr": 0.018553897629501614 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 
0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24183006535947713, - "acc_stderr": 0.024518195641879334, - "acc_norm": 0.24183006535947713, - "acc_norm_stderr": 0.024518195641879334 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968432, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968432 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.21710526315789475, - "acc_stderr": 0.03355045304882924, - "acc_norm": 0.21710526315789475, - "acc_norm_stderr": 0.03355045304882924 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.017630827375148383, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.017630827375148383 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4522058823529412, - "acc_stderr": 0.030233758551596452, - "acc_norm": 0.4522058823529412, - "acc_norm_stderr": 0.030233758551596452 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27346938775510204, - "acc_stderr": 0.02853556033712845, - "acc_norm": 0.27346938775510204, - "acc_norm_stderr": 0.02853556033712845 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.02875679962965833, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.02875679962965833 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2561929595827901, - "acc_stderr": 0.011149173153110583, - "acc_norm": 0.2561929595827901, - "acc_norm_stderr": 0.011149173153110583 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591361, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591361 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.03346409881055953, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.03346409881055953 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715006, - "mc2": 0.40367736123530334, - "mc2_stderr": 0.014824402657107816 - }, - "harness|ko_commongen_v2|2": { - "acc": 
0.2992957746478873, - "acc_stderr": 0.015698309276204924, - "acc_norm": 0.3591549295774648, - "acc_norm_stderr": 0.016445711213506745 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42dot/42dot_LLM-PLM-1.3B", - "model_sha": "a72bf57eb02cd4ea4388a344b4a5893aa95698da", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json b/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json deleted file mode 100644 index db262855fe6e3f10d9e2250f8d7b7e44b788c9bc..0000000000000000000000000000000000000000 --- a/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 
0.28242320819112626, - "acc_stderr": 0.01315545688409722, - "acc_norm": 0.35494880546075086, - "acc_norm_stderr": 0.013983036904094094 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36317466640111534, - "acc_stderr": 0.004799317209902023, - "acc_norm": 0.4613622784305915, - "acc_norm_stderr": 0.004974860878464429 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.32748538011695905, - "acc_stderr": 0.035993357714560276, - "acc_norm": 0.32748538011695905, - "acc_norm_stderr": 0.035993357714560276 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.13592233009708737, - "acc_stderr": 0.033932957297610124, - "acc_norm": 0.13592233009708737, - "acc_norm_stderr": 0.033932957297610124 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23754789272030652, - "acc_stderr": 0.015218733046150193, - "acc_norm": 0.23754789272030652, - "acc_norm_stderr": 0.015218733046150193 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2723404255319149, - "acc_stderr": 0.029101290698386698, - "acc_norm": 0.2723404255319149, - "acc_norm_stderr": 0.029101290698386698 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.034605799075530276, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.034605799075530276 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2604501607717042, - "acc_stderr": 0.024926723224845543, - "acc_norm": 0.2604501607717042, - "acc_norm_stderr": 0.024926723224845543 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.242152466367713, - "acc_stderr": 0.028751392398694755, - "acc_norm": 0.242152466367713, - "acc_norm_stderr": 0.028751392398694755 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596918, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.03727673575596918 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.02985751567338641, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.02985751567338641 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.036001056927277716, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.036001056927277716 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.03618664819936246, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.03618664819936246 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24789915966386555, - "acc_stderr": 0.028047967224176892, - "acc_norm": 0.24789915966386555, - "acc_norm_stderr": 0.028047967224176892 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.021362027725222728, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.021362027725222728 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - 
"acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.042365112580946336, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.042365112580946336 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.18719211822660098, - "acc_stderr": 0.027444924966882618, - "acc_norm": 0.18719211822660098, - "acc_norm_stderr": 0.027444924966882618 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2645161290322581, - "acc_stderr": 0.02509189237885928, - "acc_norm": 0.2645161290322581, - "acc_norm_stderr": 0.02509189237885928 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3034188034188034, - "acc_stderr": 0.030118210106942652, - "acc_norm": 0.3034188034188034, - "acc_norm_stderr": 0.030118210106942652 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2037735849056604, - "acc_stderr": 0.02479078450177541, - "acc_norm": 0.2037735849056604, - "acc_norm_stderr": 0.02479078450177541 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072775, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072775 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.026067159222275794, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.026067159222275794 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.035118075718047245, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.035118075718047245 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916707, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916707 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.18497109826589594, - "acc_stderr": 0.029605623981771204, - "acc_norm": 0.18497109826589594, - "acc_norm_stderr": 0.029605623981771204 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.035146974678623884, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.035146974678623884 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071128, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071128 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.025171041915309684, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.025171041915309684 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.20725388601036268, - "acc_stderr": 0.029252823291803644, - 
"acc_norm": 0.20725388601036268, - "acc_norm_stderr": 0.029252823291803644 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22201834862385322, - "acc_stderr": 0.017818849564796624, - "acc_norm": 0.22201834862385322, - "acc_norm_stderr": 0.017818849564796624 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.024739981355113592, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.024739981355113592 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.32231404958677684, - "acc_stderr": 0.042664163633521685, - "acc_norm": 0.32231404958677684, - "acc_norm_stderr": 0.042664163633521685 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.20394736842105263, - "acc_stderr": 0.0327900040631005, - "acc_norm": 0.20394736842105263, - "acc_norm_stderr": 0.0327900040631005 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2581699346405229, - "acc_stderr": 0.017704531653250075, - "acc_norm": 0.2581699346405229, - "acc_norm_stderr": 0.017704531653250075 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729903, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729903 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03214952147802747, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03214952147802747 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.19, - "acc_stderr": 0.039427724440366234, - "acc_norm": 0.19, - "acc_norm_stderr": 0.039427724440366234 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.25, - "acc_stderr": 0.026303648393696036, - "acc_norm": 0.25, - "acc_norm_stderr": 0.026303648393696036 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.21224489795918366, - "acc_stderr": 0.026176967197866764, - "acc_norm": 0.21224489795918366, - "acc_norm_stderr": 0.026176967197866764 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.28270042194092826, - "acc_stderr": 0.029312814153955914, - "acc_norm": 0.28270042194092826, - "acc_norm_stderr": 0.029312814153955914 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2503259452411995, - "acc_stderr": 0.011064151027165438, - "acc_norm": 0.2503259452411995, - "acc_norm_stderr": 0.011064151027165438 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 
0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.03287666758603488, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.03287666758603488 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28518971848225216, - "mc1_stderr": 0.015805827874454895, - "mc2": 0.43765472485909873, - "mc2_stderr": 0.015405588178148114 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3051643192488263, - "acc_stderr": 0.01578494789073776, - "acc_norm": 0.3732394366197183, - "acc_norm_stderr": 0.01657982009187977 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "42dot/42dot_LLM-SFT-1.3B", - "model_sha": "2dadd4492f0b27c302d8a5518003fa6045e32a8a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - 
"max_samples": null - } -} \ No newline at end of file diff --git a/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json b/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json deleted file mode 100644 index 50c3507c7b64c570734ad8ed22d815f566e1286e..0000000000000000000000000000000000000000 --- a/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19112627986348124, - "acc_stderr": 0.011490055292778596, - "acc_norm": 0.24829351535836178, - "acc_norm_stderr": 0.012624912868089764 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2756423023302131, - "acc_stderr": 0.0044592414745187915, - "acc_norm": 0.29884485162318264, - "acc_norm_stderr": 0.004568161710399566 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.03424042924691582, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.03424042924691582 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2669220945083014, - "acc_stderr": 0.015818450894777555, - "acc_norm": 0.2669220945083014, - "acc_norm_stderr": 0.015818450894777555 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617722, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617722 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2936170212765957, - "acc_stderr": 0.02977164271249123, - "acc_norm": 0.2936170212765957, - "acc_norm_stderr": 0.02977164271249123 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.27710843373493976, - "acc_stderr": 0.03484331592680588, - "acc_norm": 0.27710843373493976, - "acc_norm_stderr": 0.03484331592680588 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.20257234726688103, - "acc_stderr": 0.022827317491059686, - "acc_norm": 0.20257234726688103, - "acc_norm_stderr": 0.022827317491059686 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.032277904428505, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.032277904428505 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.21717171717171718, - "acc_stderr": 0.029376616484945644, - "acc_norm": 0.21717171717171718, - "acc_norm_stderr": 0.029376616484945644 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2206896551724138, - "acc_stderr": 0.03455930201924812, - "acc_norm": 0.2206896551724138, - "acc_norm_stderr": 0.03455930201924812 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.14705882352941177, - "acc_stderr": 0.035240689515674474, - "acc_norm": 0.14705882352941177, - "acc_norm_stderr": 0.035240689515674474 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.226890756302521, - "acc_stderr": 0.027205371538279476, - "acc_norm": 0.226890756302521, - "acc_norm_stderr": 0.027205371538279476 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2153846153846154, - "acc_stderr": 0.020843034557462878, - "acc_norm": 0.2153846153846154, - "acc_norm_stderr": 0.020843034557462878 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.13793103448275862, - "acc_stderr": 0.024261984301044565, - "acc_norm": 0.13793103448275862, - "acc_norm_stderr": 0.024261984301044565 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.20967741935483872, - "acc_stderr": 0.023157879349083536, - "acc_norm": 0.20967741935483872, - "acc_norm_stderr": 0.023157879349083536 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2094017094017094, - "acc_stderr": 0.026655699653922737, - "acc_norm": 0.2094017094017094, - "acc_norm_stderr": 0.026655699653922737 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2490566037735849, - "acc_stderr": 0.026616482980501715, - "acc_norm": 0.2490566037735849, - "acc_norm_stderr": 0.026616482980501715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.03036049015401464, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.03036049015401464 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.03126511206173043, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.03126511206173043 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113946, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113946 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.023618678310069363, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.023618678310069363 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.24074074074074073, - "acc_stderr": 
0.02378858355165854, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.02378858355165854 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.20725388601036268, - "acc_stderr": 0.02925282329180363, - "acc_norm": 0.20725388601036268, - "acc_norm_stderr": 0.02925282329180363 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512322004, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512322004 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21100917431192662, - "acc_stderr": 0.017493922404112648, - "acc_norm": 0.21100917431192662, - "acc_norm_stderr": 0.017493922404112648 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.038095238095238106, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.038095238095238106 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.024288619466046116, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.024288619466046116 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.19008264462809918, - "acc_stderr": 0.03581796951709282, - "acc_norm": 0.19008264462809918, - "acc_norm_stderr": 0.03581796951709282 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.017740899509177795, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.017740899509177795 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.026684564340460997, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460997 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.042878587513404544, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.042878587513404544 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.032664783315272714, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.032664783315272714 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23016759776536314, - "acc_stderr": 0.014078339253425809, - "acc_norm": 0.23016759776536314, - "acc_norm_stderr": 0.014078339253425809 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.02767846864214471, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.02767846864214471 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3142857142857143, - "acc_stderr": 0.029719329422417468, - "acc_norm": 0.3142857142857143, - "acc_norm_stderr": 0.029719329422417468 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2742616033755274, - 
"acc_stderr": 0.02904133351059804, - "acc_norm": 0.2742616033755274, - "acc_norm_stderr": 0.02904133351059804 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24511082138200782, - "acc_stderr": 0.010986307870045526, - "acc_norm": 0.24511082138200782, - "acc_norm_stderr": 0.010986307870045526 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.03149328104507955, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.03149328104507955 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29498164014687883, - "mc1_stderr": 0.015964400965589674, - "mc2": 0.49219803033147647, - "mc2_stderr": 0.015947492879186672 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.0539906103286385, - "acc_stderr": 0.007747151732014082, - "acc_norm": 0.09859154929577464, - "acc_norm_stderr": 0.010219175985280616 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "AtAndDev/ShortKingv0.1", - "model_sha": "6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json b/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json deleted file mode 100644 index 59640e7fc77b93719c7c95a4369c944e80685c68..0000000000000000000000000000000000000000 --- a/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.22696245733788395, - "acc_stderr": 0.012240491536132861, - "acc_norm": 0.2773037542662116, - "acc_norm_stderr": 0.013082095839059374 - }, - "harness|ko_hellaswag|10": { - "acc": 0.33578968333001397, - "acc_stderr": 0.004713006072807706, - "acc_norm": 0.41585341565425216, - "acc_norm_stderr": 0.0049186120989440285 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.29239766081871343, - "acc_stderr": 0.034886477134579236, - "acc_norm": 0.29239766081871343, - "acc_norm_stderr": 0.034886477134579236 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.015745497169049057, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.015745497169049057 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.037125378336148665, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.037125378336148665 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.251063829787234, - "acc_stderr": 0.028346963777162452, - "acc_norm": 0.251063829787234, - "acc_norm_stderr": 0.028346963777162452 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663926, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663926 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2958199356913183, - "acc_stderr": 0.025922371788818784, - "acc_norm": 0.2958199356913183, - "acc_norm_stderr": 0.025922371788818784 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484503, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484503 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.25190839694656486, - "acc_stderr": 0.03807387116306085, - "acc_norm": 0.25190839694656486, - "acc_norm_stderr": 0.03807387116306085 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2676767676767677, - "acc_stderr": 0.031544498882702866, - "acc_norm": 0.2676767676767677, - "acc_norm_stderr": 0.031544498882702866 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - 
"acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24789915966386555, - "acc_stderr": 0.028047967224176892, - "acc_norm": 0.24789915966386555, - "acc_norm_stderr": 0.028047967224176892 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.020932445774463185, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.020932445774463185 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.031447125816782426, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.031447125816782426 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.27419354838709675, - "acc_stderr": 0.025378139970885193, - "acc_norm": 0.27419354838709675, - "acc_norm_stderr": 0.025378139970885193 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24358974358974358, - "acc_stderr": 0.02812096650391441, - "acc_norm": 0.24358974358974358, - "acc_norm_stderr": 0.02812096650391441 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241238, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.04172343038705383, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.04172343038705383 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.028037929969114993, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.028037929969114993 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.03076944496729602, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.03076944496729602 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.19653179190751446, - "acc_stderr": 0.03029957466478815, - "acc_norm": 0.19653179190751446, - "acc_norm_stderr": 0.03029957466478815 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.023517294335963286, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.023517294335963286 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.23, - "acc_stderr": 
0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3374233128834356, - "acc_stderr": 0.03714908409935574, - "acc_norm": 0.3374233128834356, - "acc_norm_stderr": 0.03714908409935574 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.02465968518596728, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.02465968518596728 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.19689119170984457, - "acc_stderr": 0.028697873971860688, - "acc_norm": 0.19689119170984457, - "acc_norm_stderr": 0.028697873971860688 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.15789473684210525, - "acc_stderr": 0.03430265978485698, - "acc_norm": 0.15789473684210525, - "acc_norm_stderr": 0.03430265978485698 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24036697247706423, - "acc_stderr": 0.01832060732096407, - "acc_norm": 0.24036697247706423, - "acc_norm_stderr": 0.01832060732096407 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604673, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604673 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.02417084087934102, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.02417084087934102 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.23140495867768596, - "acc_stderr": 0.03849856098794088, - "acc_norm": 0.23140495867768596, - "acc_norm_stderr": 0.03849856098794088 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.21710526315789475, - "acc_stderr": 0.03355045304882924, - "acc_norm": 0.21710526315789475, - "acc_norm_stderr": 0.03355045304882924 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.01755581809132226, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.01755581809132226 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.026244920349843014, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.026244920349843014 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755807, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755807 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4583333333333333, - "acc_stderr": 0.033981108902946366, - "acc_norm": 0.4583333333333333, - "acc_norm_stderr": 0.033981108902946366 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.14, - "acc_stderr": 0.03487350880197771, - "acc_norm": 0.14, - "acc_norm_stderr": 0.03487350880197771 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - 
"acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3161764705882353, - "acc_stderr": 0.02824568739146292, - "acc_norm": 0.3161764705882353, - "acc_norm_stderr": 0.02824568739146292 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2653061224489796, - "acc_stderr": 0.028263889943784586, - "acc_norm": 0.2653061224489796, - "acc_norm_stderr": 0.028263889943784586 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658335, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658335 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23728813559322035, - "acc_stderr": 0.01086543669078027, - "acc_norm": 0.23728813559322035, - "acc_norm_stderr": 0.01086543669078027 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.02933116229425173, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.02933116229425173 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.031922715695483, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.031922715695483 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715008, - "mc2": 0.41338491158026774, - "mc2_stderr": 0.01512108388775634 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.23826291079812206, - "acc_stderr": 0.014603803898011337, - "acc_norm": 0.3380281690140845, - "acc_norm_stderr": 0.016215540194273206 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - 
"harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "BM-K/polyglot-ko-1.3b-it-v1.0", - "model_sha": "2f5b0dfed443e3a89c13a13b48d6fe6838c86e67", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json b/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json deleted file mode 100644 index 225dfaf199c7235aab7572a3b28d2bec2c76b32e..0000000000000000000000000000000000000000 --- a/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2363481228668942, - "acc_stderr": 0.012414960524301818, - "acc_norm": 0.2841296928327645, - "acc_norm_stderr": 0.013179442447653887 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3379804819757021, - "acc_stderr": 0.004720551323547123, - "acc_norm": 0.4192391953794065, - "acc_norm_stderr": 0.004924261467934422 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.29239766081871343, - "acc_stderr": 0.03488647713457923, - "acc_norm": 0.29239766081871343, - "acc_norm_stderr": 0.03488647713457923 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24521072796934865, - "acc_stderr": 0.01538435228454394, - "acc_norm": 0.24521072796934865, - "acc_norm_stderr": 0.01538435228454394 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.037498507091740234, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.037498507091740234 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514192, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514192 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2289156626506024, - "acc_stderr": 0.03270745277352477, - "acc_norm": 0.2289156626506024, - "acc_norm_stderr": 0.03270745277352477 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.026003301117885142, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.026003301117885142 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3273542600896861, - "acc_stderr": 0.03149384670994131, - "acc_norm": 0.3273542600896861, - "acc_norm_stderr": 0.03149384670994131 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969174, - 
"acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969174 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2828282828282828, - "acc_stderr": 0.032087795587867514, - "acc_norm": 0.2828282828282828, - "acc_norm_stderr": 0.032087795587867514 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727771, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727771 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868973, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868973 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.02127839386358628, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.02127839386358628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2838709677419355, - "acc_stderr": 0.02564938106302926, - "acc_norm": 0.2838709677419355, - "acc_norm_stderr": 0.02564938106302926 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.25213675213675213, - "acc_stderr": 0.02844796547623101, - "acc_norm": 0.25213675213675213, - "acc_norm_stderr": 0.02844796547623101 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.02674989977124124, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.02674989977124124 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072773, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072773 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.028226446749683515, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.028226446749683515 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409217, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409217 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.1791907514450867, - "acc_stderr": 0.0292425130590633, - "acc_norm": 0.1791907514450867, - "acc_norm_stderr": 0.0292425130590633 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194974, - "acc_norm": 
0.2830687830687831, - "acc_norm_stderr": 0.023201392938194974 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2398843930635838, - "acc_stderr": 0.022989592543123567, - "acc_norm": 0.2398843930635838, - "acc_norm_stderr": 0.022989592543123567 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2993827160493827, - "acc_stderr": 0.02548311560119547, - "acc_norm": 0.2993827160493827, - "acc_norm_stderr": 0.02548311560119547 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.26424870466321243, - "acc_stderr": 0.03182155050916647, - "acc_norm": 0.26424870466321243, - "acc_norm_stderr": 0.03182155050916647 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.18421052631578946, - "acc_stderr": 0.03646758875075566, - "acc_norm": 0.18421052631578946, - "acc_norm_stderr": 0.03646758875075566 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22201834862385322, - "acc_stderr": 0.017818849564796617, - "acc_norm": 0.22201834862385322, - "acc_norm_stderr": 0.017818849564796617 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604672, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604672 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.02473998135511359, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.02473998135511359 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.14, - "acc_stderr": 0.03487350880197771, - "acc_norm": 0.14, - "acc_norm_stderr": 0.03487350880197771 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2644628099173554, - "acc_stderr": 0.040261875275912046, - "acc_norm": 0.2644628099173554, - "acc_norm_stderr": 0.040261875275912046 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.017848089574913222, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.017848089574913222 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.026469036818590634, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.026469036818590634 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044792, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044792 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.03381200005643525, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 
0.03381200005643525 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.39338235294117646, - "acc_stderr": 0.029674288281311183, - "acc_norm": 0.39338235294117646, - "acc_norm_stderr": 0.029674288281311183 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.02916273841024977, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.02916273841024977 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24050632911392406, - "acc_stderr": 0.02782078198114968, - "acc_norm": 0.24050632911392406, - "acc_norm_stderr": 0.02782078198114968 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23989569752281617, - "acc_stderr": 0.010906282617981653, - "acc_norm": 0.23989569752281617, - "acc_norm_stderr": 0.010906282617981653 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591362, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591362 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.031922715695482995, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.031922715695482995 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.01505186948671501, - "mc2": 0.4174341547322483, - "mc2_stderr": 0.015183101828823979 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2676056338028169, - "acc_stderr": 0.01517593100847281, - "acc_norm": 0.3615023474178404, - "acc_norm_stderr": 0.01646912149043009 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "BM-K/polyglot-ko-1.3b-it-v1.1", - "model_sha": "78f227625af9b7013b69de4ef2a203ac71bdda5b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json b/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json deleted file mode 100644 index c24528e66846deca6b0578a3431522868f20025e..0000000000000000000000000000000000000000 --- a/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2551194539249147, - "acc_stderr": 0.012739038695202109, - "acc_norm": 0.30119453924914674, - "acc_norm_stderr": 0.01340674176784762 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3379804819757021, - "acc_stderr": 0.0047205513235471196, - "acc_norm": 0.4176458872734515, - "acc_norm_stderr": 0.00492163264510238 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.034462962170884265, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.034462962170884265 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.29118773946360155, - "acc_stderr": 0.016246087069701393, - "acc_norm": 0.29118773946360155, - "acc_norm_stderr": 0.016246087069701393 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.038201699145179055, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.038201699145179055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.25957446808510637, - "acc_stderr": 0.028659179374292326, - "acc_norm": 0.25957446808510637, - "acc_norm_stderr": 0.028659179374292326 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.26506024096385544, - "acc_stderr": 0.03436024037944967, - "acc_norm": 0.26506024096385544, - 
"acc_norm_stderr": 0.03436024037944967 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2540192926045016, - "acc_stderr": 0.02472386150477169, - "acc_norm": 0.2540192926045016, - "acc_norm_stderr": 0.02472386150477169 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3273542600896861, - "acc_stderr": 0.031493846709941306, - "acc_norm": 0.3273542600896861, - "acc_norm_stderr": 0.031493846709941306 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287414, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287414 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932026, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932026 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.23448275862068965, - "acc_stderr": 0.035306258743465914, - "acc_norm": 0.23448275862068965, - "acc_norm_stderr": 0.035306258743465914 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121626, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121626 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.25384615384615383, - "acc_stderr": 0.022066054378726257, - "acc_norm": 0.25384615384615383, - "acc_norm_stderr": 0.022066054378726257 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243838, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243838 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2903225806451613, - "acc_stderr": 0.025822106119415898, - "acc_norm": 0.2903225806451613, - "acc_norm_stderr": 0.025822106119415898 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23504273504273504, - "acc_stderr": 0.027778835904935437, - "acc_norm": 0.23504273504273504, - "acc_norm_stderr": 0.027778835904935437 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.027611163402399715, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.027611163402399715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.039559328617958335, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.039559328617958335 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.026067159222275815, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.026067159222275815 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.033742355504256936, - "acc_norm": 0.2185430463576159, 
- "acc_norm_stderr": 0.033742355504256936 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.208955223880597, - "acc_stderr": 0.028748298931728665, - "acc_norm": 0.208955223880597, - "acc_norm_stderr": 0.028748298931728665 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.031265112061730424, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.031265112061730424 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.024105712607754307, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.024105712607754307 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.31901840490797545, - "acc_stderr": 0.03661997551073836, - "acc_norm": 0.31901840490797545, - "acc_norm_stderr": 0.03661997551073836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.025171041915309684, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.025171041915309684 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29533678756476683, - "acc_stderr": 0.0329229663915514, - "acc_norm": 0.29533678756476683, - "acc_norm_stderr": 0.0329229663915514 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.04096985139843671, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.04096985139843671 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23853211009174313, - "acc_stderr": 0.018272575810231863, - "acc_norm": 0.23853211009174313, - "acc_norm_stderr": 0.018272575810231863 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795132, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795132 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2581699346405229, - "acc_stderr": 0.025058503316958157, - "acc_norm": 0.2581699346405229, - "acc_norm_stderr": 0.025058503316958157 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2231404958677686, - "acc_stderr": 0.03800754475228733, - "acc_norm": 0.2231404958677686, - "acc_norm_stderr": 0.03800754475228733 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.0306436070716771, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.0306436070716771 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.26633986928104575, - "acc_stderr": 0.01788318813466719, - "acc_norm": 0.26633986928104575, - "acc_norm_stderr": 0.01788318813466719 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.038342410214190735, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.038342410214190735 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.02896370257079101, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.02896370257079101 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2670391061452514, - "acc_stderr": 0.014796502622562544, - "acc_norm": 0.2670391061452514, - "acc_norm_stderr": 0.014796502622562544 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2536764705882353, - "acc_stderr": 0.026431329870789513, - "acc_norm": 0.2536764705882353, - "acc_norm_stderr": 0.026431329870789513 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.31020408163265306, - "acc_stderr": 0.029613459872484378, - "acc_norm": 0.31020408163265306, - "acc_norm_stderr": 0.029613459872484378 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23598435462842243, - "acc_stderr": 0.010844802669662682, - "acc_norm": 0.23598435462842243, - "acc_norm_stderr": 0.010844802669662682 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.031145570659486782, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.031145570659486782 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.03427743175816524, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.03427743175816524 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2252141982864137, - "mc1_stderr": 0.01462324076802348, - "mc2": 0.4080616788903193, - "mc2_stderr": 0.015242253889585933 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4107981220657277, - "acc_stderr": 0.01686481206490982, - "acc_norm": 0.4953051643192488, - "acc_norm_stderr": 0.017139023665847616 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "BM-K/polyglot-ko-1.3b-it-v1.2", - "model_sha": "d1a6abed1624c40b91b5df3acb5e245e281adc18", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json b/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json deleted file mode 100644 index 369d8dc31751e91186c98c82019f6bfd945cfd8b..0000000000000000000000000000000000000000 --- a/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2619453924914676, - "acc_stderr": 0.012849054826858117, - "acc_norm": 0.30802047781569963, - "acc_norm_stderr": 0.01349142951729204 - }, - "harness|ko_hellaswag|10": { - "acc": 0.33957379008165706, - "acc_stderr": 0.0047259676848064045, - "acc_norm": 0.4195379406492731, - "acc_norm_stderr": 0.004924748500639348 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.25146198830409355, - "acc_stderr": 0.033275044238468436, - "acc_norm": 0.25146198830409355, - "acc_norm_stderr": 0.033275044238468436 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2784163473818646, - "acc_stderr": 0.01602829518899247, - "acc_norm": 0.2784163473818646, - "acc_norm_stderr": 0.01602829518899247 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - 
"acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.25957446808510637, - "acc_stderr": 0.028659179374292326, - "acc_norm": 0.25957446808510637, - "acc_norm_stderr": 0.028659179374292326 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.036293353299478595, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.036293353299478595 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2733118971061093, - "acc_stderr": 0.02531176597542612, - "acc_norm": 0.2733118971061093, - "acc_norm_stderr": 0.02531176597542612 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.29596412556053814, - "acc_stderr": 0.030636591348699796, - "acc_norm": 0.29596412556053814, - "acc_norm_stderr": 0.030636591348699796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2676767676767677, - "acc_stderr": 0.03154449888270287, - "acc_norm": 0.2676767676767677, - "acc_norm_stderr": 0.03154449888270287 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.1724137931034483, - "acc_stderr": 0.031478307902595745, - "acc_norm": 0.1724137931034483, - "acc_norm_stderr": 0.031478307902595745 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.021278393863586282, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.021278393863586282 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.02967833314144446, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.02967833314144446 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2838709677419355, - "acc_stderr": 0.02564938106302925, - "acc_norm": 0.2838709677419355, - "acc_norm_stderr": 0.02564938106302925 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24786324786324787, - "acc_stderr": 0.028286324075564393, - "acc_norm": 0.24786324786324787, - "acc_norm_stderr": 0.028286324075564393 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - 
"harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.02696242432507383, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.02696242432507383 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - "acc_stderr": 0.034791855725996586, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.034791855725996586 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.031157150869355568, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.031157150869355568 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.03186209851641144, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.03186209851641144 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.035146974678623884, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.035146974678623884 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.02468531686725781, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.02468531686725781 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2623456790123457, - "acc_stderr": 0.0244772228561351, - "acc_norm": 0.2623456790123457, - "acc_norm_stderr": 0.0244772228561351 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24352331606217617, - "acc_stderr": 0.03097543638684542, - "acc_norm": 0.24352331606217617, - "acc_norm_stderr": 0.03097543638684542 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21100917431192662, - "acc_stderr": 0.01749392240411265, - "acc_norm": 0.21100917431192662, - "acc_norm_stderr": 0.01749392240411265 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392871, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392871 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.0248480182638752, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.0248480182638752 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 
0.35537190082644626, - "acc_stderr": 0.04369236326573981, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573981 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.030643607071677105, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.030643607071677105 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2679738562091503, - "acc_stderr": 0.017917974069594722, - "acc_norm": 0.2679738562091503, - "acc_norm_stderr": 0.017917974069594722 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.02813968944485966, - "acc_norm": 0.2175925925925926, - "acc_norm_stderr": 0.02813968944485966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25921787709497207, - "acc_stderr": 0.014655780837497717, - "acc_norm": 0.25921787709497207, - "acc_norm_stderr": 0.014655780837497717 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.15, - "acc_stderr": 0.0358870281282637, - "acc_norm": 0.15, - "acc_norm_stderr": 0.0358870281282637 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2867647058823529, - "acc_stderr": 0.027472274473233818, - "acc_norm": 0.2867647058823529, - "acc_norm_stderr": 0.027472274473233818 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3020408163265306, - "acc_stderr": 0.029393609319879818, - "acc_norm": 0.3020408163265306, - "acc_norm_stderr": 0.029393609319879818 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25738396624472576, - "acc_stderr": 0.028458820991460288, - "acc_norm": 0.25738396624472576, - "acc_norm_stderr": 0.028458820991460288 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23728813559322035, - "acc_stderr": 0.010865436690780272, - "acc_norm": 0.23728813559322035, - "acc_norm_stderr": 0.010865436690780272 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.03058759135160425, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.03058759135160425 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.0347769116216366, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.0347769116216366 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.22888616891064872, - "mc1_stderr": 0.014706994909055027, - "mc2": 0.4031826036090223, - "mc2_stderr": 0.0151985432197755 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.318075117370892, - "acc_stderr": 0.015964978456287846, - "acc_norm": 0.4154929577464789, - "acc_norm_stderr": 0.016893200149530024 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - 
"harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "BM-K/polyglot-ko-1.3b-it-v1.3", - "model_sha": "1df1840d994fed4d5806ca38746639407c9bb970", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json b/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json deleted file mode 100644 index ac59f2e1f64718d4da7d7cc80116d56d7d54d637..0000000000000000000000000000000000000000 --- a/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.25170648464163825, - "acc_stderr": 0.012682496334042963, - "acc_norm": 0.30887372013651876, - "acc_norm_stderr": 0.013501770929344003 - }, - "harness|ko_hellaswag|10": { - "acc": 0.34096793467436765, - "acc_stderr": 0.004730658073041557, - "acc_norm": 0.4206333399721171, - "acc_norm_stderr": 0.004926518439372268 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.23976608187134502, - "acc_stderr": 0.03274485211946956, - "acc_norm": 0.23976608187134502, - "acc_norm_stderr": 
0.03274485211946956 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.04453254836326467, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.04453254836326467 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.01598281477469563, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.01598281477469563 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.03633384414073462, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.03633384414073462 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826373, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826373 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.23829787234042554, - "acc_stderr": 0.027851252973889802, - "acc_norm": 0.23829787234042554, - "acc_norm_stderr": 0.027851252973889802 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3072289156626506, - "acc_stderr": 0.03591566797824665, - "acc_norm": 0.3072289156626506, - "acc_norm_stderr": 0.03591566797824665 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2508038585209003, - "acc_stderr": 0.024619771956697165, - "acc_norm": 0.2508038585209003, - "acc_norm_stderr": 0.024619771956697165 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.02715715047956382, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.02715715047956382 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.31313131313131315, - "acc_stderr": 0.033042050878136525, - "acc_norm": 0.31313131313131315, - "acc_norm_stderr": 0.033042050878136525 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.04389869956808779, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.04389869956808779 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.36134453781512604, - "acc_stderr": 0.031204691225150006, - "acc_norm": 0.36134453781512604, - "acc_norm_stderr": 0.031204691225150006 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28974358974358977, - "acc_stderr": 0.023000628243687968, - "acc_norm": 0.28974358974358977, - "acc_norm_stderr": 0.023000628243687968 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.04236511258094633, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.04236511258094633 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358608, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358608 - }, - "harness|ko_mmlu_high_school_biology|5": { - 
"acc": 0.2806451612903226, - "acc_stderr": 0.025560604721022884, - "acc_norm": 0.2806451612903226, - "acc_norm_stderr": 0.025560604721022884 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2094017094017094, - "acc_stderr": 0.026655699653922737, - "acc_norm": 0.2094017094017094, - "acc_norm_stderr": 0.026655699653922737 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.027611163402399715, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.027611163402399715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.04309118709946459, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.04309118709946459 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.18407960199004975, - "acc_stderr": 0.02740385941078684, - "acc_norm": 0.18407960199004975, - "acc_norm_stderr": 0.02740385941078684 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.030952890217749884, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.030952890217749884 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.02226181769240018, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.02226181769240018 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.24305555555555555, - "acc_stderr": 0.03586879280080342, - "acc_norm": 0.24305555555555555, - "acc_norm_stderr": 0.03586879280080342 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036843, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036843 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2630057803468208, - "acc_stderr": 0.023703099525258172, - "acc_norm": 0.2630057803468208, - "acc_norm_stderr": 0.023703099525258172 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724148, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724148 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25617283950617287, - "acc_stderr": 0.024288533637726095, - "acc_norm": 0.25617283950617287, - "acc_norm_stderr": 0.024288533637726095 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.25906735751295334, - "acc_stderr": 0.03161877917935411, - "acc_norm": 0.25906735751295334, - "acc_norm_stderr": 0.03161877917935411 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.043036840335373173, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.043036840335373173 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24770642201834864, - "acc_stderr": 0.018508143602547805, - "acc_norm": 0.24770642201834864, - "acc_norm_stderr": 0.018508143602547805 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 
0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2679738562091503, - "acc_stderr": 0.025360603796242557, - "acc_norm": 0.2679738562091503, - "acc_norm_stderr": 0.025360603796242557 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036844, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036844 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.35537190082644626, - "acc_stderr": 0.04369236326573982, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573982 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19078947368421054, - "acc_stderr": 0.031975658210325004, - "acc_norm": 0.19078947368421054, - "acc_norm_stderr": 0.031975658210325004 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24183006535947713, - "acc_stderr": 0.017322789207784326, - "acc_norm": 0.24183006535947713, - "acc_norm_stderr": 0.017322789207784326 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.0258921511567094, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.0258921511567094 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.028963702570791047, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.028963702570791047 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2636871508379888, - "acc_stderr": 0.014736926383761987, - "acc_norm": 0.2636871508379888, - "acc_norm_stderr": 0.014736926383761987 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.19117647058823528, - "acc_stderr": 0.023886881922440362, - "acc_norm": 0.19117647058823528, - "acc_norm_stderr": 0.023886881922440362 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2816326530612245, - "acc_stderr": 0.028795185574291282, - "acc_norm": 0.2816326530612245, - "acc_norm_stderr": 0.028795185574291282 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24472573839662448, - "acc_stderr": 0.02798569938703642, - "acc_norm": 0.24472573839662448, - "acc_norm_stderr": 0.02798569938703642 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23272490221642764, - "acc_stderr": 0.0107925955538885, - "acc_norm": 0.23272490221642764, - "acc_norm_stderr": 0.0107925955538885 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.029983733055913623, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.029983733055913623 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23623011015911874, - "mc1_stderr": 0.014869755015871096, - "mc2": 0.414131633910044, - "mc2_stderr": 0.015365810716919849 - }, - "harness|ko_commongen_v2|2": { - "acc": 
0.24295774647887325, - "acc_stderr": 0.01470146638508064, - "acc_norm": 0.37089201877934275, - "acc_norm_stderr": 0.01655852169248733 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "BM-K/polyglot-ko-1.3b-it-v1.4", - "model_sha": "acbd40970c01a4b40debc0d9a9ac096a74673d74", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json b/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json deleted file mode 100644 index cd93003731f3c0f7305510ac07b19e084ce44fa6..0000000000000000000000000000000000000000 --- a/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - 
"harness|ko_arc_challenge|25": { - "acc": 0.3037542662116041, - "acc_stderr": 0.013438909184778759, - "acc_norm": 0.3464163822525597, - "acc_norm_stderr": 0.013905011180063251 - }, - "harness|ko_hellaswag|10": { - "acc": 0.350726946823342, - "acc_stderr": 0.0047622234924352535, - "acc_norm": 0.45429197371041624, - "acc_norm_stderr": 0.004968888130290068 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4678362573099415, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.4678362573099415, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4077669902912621, - "acc_stderr": 0.048657775704107696, - "acc_norm": 0.4077669902912621, - "acc_norm_stderr": 0.048657775704107696 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4610472541507024, - "acc_stderr": 0.01782562179323902, - "acc_norm": 0.4610472541507024, - "acc_norm_stderr": 0.01782562179323902 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04244633238353228, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04244633238353228 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.03106898596312215, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.03106898596312215 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.03680783690727581, - "acc_norm": 0.3373493975903614, - "acc_norm_stderr": 0.03680783690727581 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4180064308681672, - "acc_stderr": 0.02801365189199507, - "acc_norm": 0.4180064308681672, - "acc_norm_stderr": 0.02801365189199507 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.36771300448430494, - "acc_stderr": 0.03236198350928276, - "acc_norm": 0.36771300448430494, - "acc_norm_stderr": 0.03236198350928276 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.043564472026650695, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.043564472026650695 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.41414141414141414, - "acc_stderr": 0.03509438348879629, - "acc_norm": 0.41414141414141414, - "acc_norm_stderr": 0.03509438348879629 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.040937939812662374, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.040937939812662374 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.41596638655462187, - "acc_stderr": 0.03201650100739615, - "acc_norm": 0.41596638655462187, - "acc_norm_stderr": 0.03201650100739615 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4025641025641026, - "acc_stderr": 0.024864995159767762, - "acc_norm": 0.4025641025641026, - "acc_norm_stderr": 0.024864995159767762 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - 
"acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3497536945812808, - "acc_stderr": 0.03355400904969565, - "acc_norm": 0.3497536945812808, - "acc_norm_stderr": 0.03355400904969565 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3870967741935484, - "acc_stderr": 0.027709359675032488, - "acc_norm": 0.3870967741935484, - "acc_norm_stderr": 0.027709359675032488 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5384615384615384, - "acc_stderr": 0.03265903381186194, - "acc_norm": 0.5384615384615384, - "acc_norm_stderr": 0.03265903381186194 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3584905660377358, - "acc_stderr": 0.029514703583981765, - "acc_norm": 0.3584905660377358, - "acc_norm_stderr": 0.029514703583981765 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4636363636363636, - "acc_stderr": 0.04776449162396197, - "acc_norm": 0.4636363636363636, - "acc_norm_stderr": 0.04776449162396197 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.02813325257881564, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.02813325257881564 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4925373134328358, - "acc_stderr": 0.035351400842767194, - "acc_norm": 0.4925373134328358, - "acc_norm_stderr": 0.035351400842767194 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3468208092485549, - "acc_stderr": 0.036291466701596636, - "acc_norm": 0.3468208092485549, - "acc_norm_stderr": 0.036291466701596636 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.28835978835978837, - "acc_stderr": 0.023330654054535903, - "acc_norm": 0.28835978835978837, - "acc_norm_stderr": 0.023330654054535903 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.40173410404624277, - "acc_stderr": 0.026394104177643634, - "acc_norm": 0.40173410404624277, - "acc_norm_stderr": 0.026394104177643634 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.38650306748466257, - "acc_stderr": 0.038258255488486076, - "acc_norm": 0.38650306748466257, - "acc_norm_stderr": 0.038258255488486076 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027125115513166865, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027125115513166865 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40932642487046633, - "acc_stderr": 
0.03548608168860806, - "acc_norm": 0.40932642487046633, - "acc_norm_stderr": 0.03548608168860806 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537318, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537318 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3798165137614679, - "acc_stderr": 0.020808825617866244, - "acc_norm": 0.3798165137614679, - "acc_norm_stderr": 0.020808825617866244 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.038932596106046734, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.038932596106046734 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.02768418188330289, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.02768418188330289 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.04507732278775088, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.04507732278775088 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.018718067052623227, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.018718067052623227 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.030546745264953202, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.030546745264953202 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.02679956202488769, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.02679956202488769 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.49387755102040815, - "acc_stderr": 0.032006820201639086, - "acc_norm": 0.49387755102040815, - "acc_norm_stderr": 0.032006820201639086 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4092827004219409, - "acc_stderr": 0.032007041833595914, - "acc_norm": 0.4092827004219409, - "acc_norm_stderr": 0.032007041833595914 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31290743155149936, - "acc_stderr": 0.011842529823062999, - "acc_norm": 0.31290743155149936, - "acc_norm_stderr": 0.011842529823062999 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29901960784313725, - 
"acc_stderr": 0.03213325717373616, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.03213325717373616 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3878787878787879, - "acc_stderr": 0.038049136539710114, - "acc_norm": 0.3878787878787879, - "acc_norm_stderr": 0.038049136539710114 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2631578947368421, - "mc1_stderr": 0.015415241740237035, - "mc2": 0.42145051773986575, - "mc2_stderr": 0.015233960921162444 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3415492957746479, - "acc_stderr": 0.01625636906118511, - "acc_norm": 0.42488262910798125, - "acc_norm_stderr": 0.016945248826821704 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Chang-Su/llama-2-13b-chat-ko", - "model_sha": "3a82a33f61584cbe72dc32c15d55bfd182cefd8b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - 
"num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json b/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json deleted file mode 100644 index 34b0faca313687fabc5d115bd13938b0a8b7e8ba..0000000000000000000000000000000000000000 --- a/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2977815699658703, - "acc_stderr": 0.01336308010724449, - "acc_norm": 0.3370307167235495, - "acc_norm_stderr": 0.013813476652902272 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38458474407488547, - "acc_stderr": 0.004855027248398158, - "acc_norm": 0.4970125473013344, - "acc_norm_stderr": 0.004989692344313998 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.1695906432748538, - "acc_stderr": 0.028782108105401712, - "acc_norm": 0.1695906432748538, - "acc_norm_stderr": 0.028782108105401712 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3300970873786408, - "acc_stderr": 0.0465614711001235, - "acc_norm": 0.3300970873786408, - "acc_norm_stderr": 0.0465614711001235 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20561941251596424, - "acc_stderr": 0.014452500456785823, - "acc_norm": 0.20561941251596424, - "acc_norm_stderr": 0.014452500456785823 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.17872340425531916, - "acc_stderr": 0.025045373272050957, - "acc_norm": 0.17872340425531916, - "acc_norm_stderr": 0.025045373272050957 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.031417842916639245, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.031417842916639245 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.1031390134529148, - "acc_stderr": 0.020412564289839272, - "acc_norm": 0.1031390134529148, - "acc_norm_stderr": 0.020412564289839272 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03358618145732523, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03358618145732523 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.04784060704105653, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.04784060704105653 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 
0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.024359581465396983, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.024359581465396983 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678241, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678241 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466777, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466777 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.17094017094017094, - "acc_stderr": 0.024662496845209828, - "acc_norm": 0.17094017094017094, - "acc_norm_stderr": 0.024662496845209828 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.02825420034443866, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.02825420034443866 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.036146654241808254, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.036146654241808254 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948365, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948365 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.022497230190967547, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.022497230190967547 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.033519538795212696, - "acc_norm": 
0.2392638036809816, - "acc_norm_stderr": 0.033519538795212696 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720685, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720685 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3541284403669725, - "acc_stderr": 0.0205047290138291, - "acc_norm": 0.3541284403669725, - "acc_norm_stderr": 0.0205047290138291 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2973856209150327, - "acc_stderr": 0.026173908506718576, - "acc_norm": 0.2973856209150327, - "acc_norm_stderr": 0.026173908506718576 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.12396694214876033, - "acc_stderr": 0.03008309871603522, - "acc_norm": 0.12396694214876033, - "acc_norm_stderr": 0.03008309871603522 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.32894736842105265, - "acc_stderr": 0.03823428969926606, - "acc_norm": 0.32894736842105265, - "acc_norm_stderr": 0.03823428969926606 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21241830065359477, - "acc_stderr": 0.016547148636203147, - "acc_norm": 0.21241830065359477, - "acc_norm_stderr": 0.016547148636203147 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.025645553622266733, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.025645553622266733 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16964285714285715, - "acc_stderr": 0.03562367850095391, - "acc_norm": 0.16964285714285715, - "acc_norm_stderr": 0.03562367850095391 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4163265306122449, - "acc_stderr": 0.03155782816556163, - "acc_norm": 0.4163265306122449, - 
"acc_norm_stderr": 0.03155782816556163 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.19831223628691982, - "acc_stderr": 0.025955020841621112, - "acc_norm": 0.19831223628691982, - "acc_norm_stderr": 0.025955020841621112 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24315514993481094, - "acc_stderr": 0.010956556654417356, - "acc_norm": 0.24315514993481094, - "acc_norm_stderr": 0.010956556654417356 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693257, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693257 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.033175059300091805, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.033175059300091805 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2423500611995104, - "mc1_stderr": 0.015000674373570342, - "mc2": 0.4081734277840062, - "mc2_stderr": 0.014989124693241153 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2687793427230047, - "acc_stderr": 0.015196983421381469, - "acc_norm": 0.3380281690140845, - "acc_norm_stderr": 0.016215540194273168 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - 
"harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DILAB-HYU/KoQuality-Polyglot-5.8b", - "model_sha": "3bd0773198883587e1ced9f32a1763da2b64a536", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json b/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json deleted file mode 100644 index 4ff0b5fa39fbb11a9743ffc229d7427b6f2eff7f..0000000000000000000000000000000000000000 --- a/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3310580204778157, - "acc_stderr": 0.013752062419817832, - "acc_norm": 0.3916382252559727, - "acc_norm_stderr": 0.014264122124938215 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3873730332603067, - "acc_stderr": 0.004861544478451855, - "acc_norm": 0.5097590121489743, - "acc_norm_stderr": 0.004988830884131634 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.03811079669833531, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.03811079669833531 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3300970873786408, - "acc_stderr": 0.0465614711001235, - "acc_norm": 0.3300970873786408, - "acc_norm_stderr": 0.0465614711001235 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3895274584929757, - "acc_stderr": 0.017438082556264594, - "acc_norm": 0.3895274584929757, - "acc_norm_stderr": 0.017438082556264594 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4, - "acc_stderr": 0.04232073695151589, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04232073695151589 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28936170212765955, - "acc_stderr": 0.029644006577009618, - "acc_norm": 0.28936170212765955, - "acc_norm_stderr": 0.029644006577009618 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010601, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010601 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.40192926045016075, - "acc_stderr": 0.027846476005930477, - "acc_norm": 0.40192926045016075, - "acc_norm_stderr": 0.027846476005930477 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755291, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755291 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48091603053435117, - "acc_stderr": 0.04382094705550988, - "acc_norm": 0.48091603053435117, - "acc_norm_stderr": 0.04382094705550988 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.43434343434343436, - "acc_stderr": 0.035315058793591834, - "acc_norm": 0.43434343434343436, - "acc_norm_stderr": 0.035315058793591834 - }, - 
"harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3931034482758621, - "acc_stderr": 0.040703290137070705, - "acc_norm": 0.3931034482758621, - "acc_norm_stderr": 0.040703290137070705 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2948717948717949, - "acc_stderr": 0.02311936275823229, - "acc_norm": 0.2948717948717949, - "acc_norm_stderr": 0.02311936275823229 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.047323326159788126, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.047323326159788126 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34516129032258064, - "acc_stderr": 0.027045746573534327, - "acc_norm": 0.34516129032258064, - "acc_norm_stderr": 0.027045746573534327 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.48717948717948717, - "acc_stderr": 0.032745319388423504, - "acc_norm": 0.48717948717948717, - "acc_norm_stderr": 0.032745319388423504 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33584905660377357, - "acc_stderr": 0.029067220146644823, - "acc_norm": 0.33584905660377357, - "acc_norm_stderr": 0.029067220146644823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.0472457740573157, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.0472457740573157 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.46766169154228854, - "acc_stderr": 0.035281314729336065, - "acc_norm": 0.46766169154228854, - "acc_norm_stderr": 0.035281314729336065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736411, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736411 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.039420826399272135, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.039420826399272135 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.407514450867052, - "acc_stderr": 0.026454578146931505, - "acc_norm": 0.407514450867052, - "acc_norm_stderr": 0.026454578146931505 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.03680350371286461, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.03680350371286461 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.02716368603827123, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.02716368603827123 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41450777202072536, - "acc_stderr": 0.03555300319557672, - "acc_norm": 0.41450777202072536, - "acc_norm_stderr": 0.03555300319557672 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.42018348623853213, - "acc_stderr": 0.021162420048273508, - "acc_norm": 0.42018348623853213, - "acc_norm_stderr": 0.021162420048273508 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924315, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924315 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.41830065359477125, - "acc_stderr": 0.02824513402438729, - "acc_norm": 0.41830065359477125, - "acc_norm_stderr": 0.02824513402438729 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5289256198347108, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.5289256198347108, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119667, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119667 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.31699346405228757, - "acc_stderr": 0.018824219512706214, - "acc_norm": 0.31699346405228757, - "acc_norm_stderr": 0.018824219512706214 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.03203614084670058, - "acc_norm": 0.3287037037037037, - "acc_norm_stderr": 0.03203614084670058 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25139664804469275, - "acc_stderr": 0.014508979453553977, - "acc_norm": 0.25139664804469275, - "acc_norm_stderr": 0.014508979453553977 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - 
"harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.030008562845003476, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.030008562845003476 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3877551020408163, - "acc_stderr": 0.031192230726795656, - "acc_norm": 0.3877551020408163, - "acc_norm_stderr": 0.031192230726795656 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5147679324894515, - "acc_stderr": 0.032533028078777386, - "acc_norm": 0.5147679324894515, - "acc_norm_stderr": 0.032533028078777386 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3305084745762712, - "acc_stderr": 0.01201414210184297, - "acc_norm": 0.3305084745762712, - "acc_norm_stderr": 0.01201414210184297 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.033744993563193555, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.033744993563193555 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.37575757575757573, - "acc_stderr": 0.03781887353205982, - "acc_norm": 0.37575757575757573, - "acc_norm_stderr": 0.03781887353205982 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.01494881267906214, - "mc2": 0.3781293727977648, - "mc2_stderr": 0.014917319628125631 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5610328638497653, - "acc_stderr": 0.017011608310486023, - "acc_norm": 0.6490610328638498, - "acc_norm_stderr": 0.016360395003030395 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - 
"harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/COLA3-7B", - "model_sha": "831fc99b2b9d86ad17129c419953502f2d4f8da7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json b/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json deleted file mode 100644 index b51ad1cfad37b33dee5ac39be39e238d8f7b0adc..0000000000000000000000000000000000000000 --- a/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36860068259385664, - "acc_stderr": 0.014097810678042192, - "acc_norm": 0.42235494880546076, - "acc_norm_stderr": 0.014434138713379981 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4082852021509659, - "acc_stderr": 0.004905119039849461, - "acc_norm": 0.5435172276438957, - "acc_norm_stderr": 0.004970846697552308 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5555555555555556, - "acc_stderr": 0.03811079669833531, - "acc_norm": 0.5555555555555556, - "acc_norm_stderr": 0.03811079669833531 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4563106796116505, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.4563106796116505, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5057471264367817, - "acc_stderr": 0.017878782326129224, - "acc_norm": 0.5057471264367817, - "acc_norm_stderr": 0.017878782326129224 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.04256193767901407, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.04256193767901407 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.03078373675774564, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.03078373675774564 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4397590361445783, - "acc_stderr": 0.03864139923699122, - "acc_norm": 0.4397590361445783, - "acc_norm_stderr": 0.03864139923699122 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5080385852090032, - "acc_stderr": 0.028394421370984538, - "acc_norm": 0.5080385852090032, - "acc_norm_stderr": 0.028394421370984538 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.03252113489929187, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.03252113489929187 - }, - 
"harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.035441324919479704, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.035441324919479704 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179961, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179961 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4369747899159664, - "acc_stderr": 0.03221943636566197, - "acc_norm": 0.4369747899159664, - "acc_norm_stderr": 0.03221943636566197 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3923076923076923, - "acc_stderr": 0.02475600038213094, - "acc_norm": 0.3923076923076923, - "acc_norm_stderr": 0.02475600038213094 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4187192118226601, - "acc_stderr": 0.03471192860518468, - "acc_norm": 0.4187192118226601, - "acc_norm_stderr": 0.03471192860518468 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.432258064516129, - "acc_stderr": 0.028181739720019416, - "acc_norm": 0.432258064516129, - "acc_norm_stderr": 0.028181739720019416 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6495726495726496, - "acc_stderr": 0.0312561082442188, - "acc_norm": 0.6495726495726496, - "acc_norm_stderr": 0.0312561082442188 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4679245283018868, - "acc_stderr": 0.030709486992556545, - "acc_norm": 0.4679245283018868, - "acc_norm_stderr": 0.030709486992556545 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.44545454545454544, - "acc_stderr": 0.04760548821460325, - "acc_norm": 0.44545454545454544, - "acc_norm_stderr": 0.04760548821460325 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.026466117538959916, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.026466117538959916 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5572139303482587, - "acc_stderr": 0.03512310964123937, - "acc_norm": 0.5572139303482587, - "acc_norm_stderr": 0.03512310964123937 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.03681229633394319, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.03681229633394319 - }, - 
"harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776575, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4508670520231214, - "acc_stderr": 0.026788811931562757, - "acc_norm": 0.4508670520231214, - "acc_norm_stderr": 0.026788811931562757 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.02774431344337654, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.02774431344337654 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144807, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144807 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48256880733944957, - "acc_stderr": 0.021424291871853147, - "acc_norm": 0.48256880733944957, - "acc_norm_stderr": 0.021424291871853147 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604674, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604674 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.42483660130718953, - "acc_stderr": 0.02830457667314112, - "acc_norm": 0.42483660130718953, - "acc_norm_stderr": 0.02830457667314112 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5619834710743802, - "acc_stderr": 0.045291468044357915, - "acc_norm": 0.5619834710743802, - "acc_norm_stderr": 0.045291468044357915 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.039993097127774734, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.039993097127774734 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32189542483660133, - "acc_stderr": 0.018901015322093085, - "acc_norm": 0.32189542483660133, - "acc_norm_stderr": 0.018901015322093085 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.02764012054516994, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.02764012054516994 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03179876342176851, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03179876342176851 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2426470588235294, - "acc_stderr": 0.026040662474201285, - "acc_norm": 0.2426470588235294, - "acc_norm_stderr": 0.026040662474201285 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46938775510204084, - "acc_stderr": 0.031949171367580624, - "acc_norm": 0.46938775510204084, - "acc_norm_stderr": 0.031949171367580624 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5063291139240507, - "acc_stderr": 0.03254462010767859, - "acc_norm": 0.5063291139240507, - "acc_norm_stderr": 0.03254462010767859 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.30247718383311606, - "acc_stderr": 0.0117315242341657, - "acc_norm": 0.30247718383311606, - "acc_norm_stderr": 0.0117315242341657 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.03898531605579419, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.03898531605579419 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2533659730722154, - "mc1_stderr": 0.015225899340826824, - "mc2": 0.40933802446057865, - "mc2_stderr": 0.014937193336867839 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3720657276995305, - "acc_stderr": 0.016569223163823556, - "acc_norm": 0.4835680751173709, - "acc_norm_stderr": 0.017130520993936017 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - 
"harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/COLA3_13B", - "model_sha": "7725e7a1c6f8f022c7c4ec0286dd9f7fada126bd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json b/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json deleted file mode 100644 index a226352705c0af031de35e3f499aba081c748696..0000000000000000000000000000000000000000 --- a/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3319112627986348, - "acc_stderr": 0.013760988200880533, - "acc_norm": 0.38993174061433444, - "acc_norm_stderr": 0.014252959848892884 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3857797251543517, - "acc_stderr": 0.004857840934549158, - "acc_norm": 0.5046803425612428, - "acc_norm_stderr": 0.004989562798280523 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38011695906432746, - "acc_stderr": 0.037229657413855394, - "acc_norm": 0.38011695906432746, - "acc_norm_stderr": 0.037229657413855394 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.33980582524271846, - "acc_stderr": 0.04689765937278134, - "acc_norm": 0.33980582524271846, - "acc_norm_stderr": 0.04689765937278134 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41762452107279696, - "acc_stderr": 0.01763563732695152, - "acc_norm": 0.41762452107279696, - "acc_norm_stderr": 0.01763563732695152 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354544, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354544 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2936170212765957, - "acc_stderr": 0.02977164271249123, - "acc_norm": 0.2936170212765957, - "acc_norm_stderr": 0.02977164271249123 - }, - "harness|ko_mmlu_virology|5": { - 
"acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.43086816720257237, - "acc_stderr": 0.028125340983972714, - "acc_norm": 0.43086816720257237, - "acc_norm_stderr": 0.028125340983972714 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.42152466367713004, - "acc_stderr": 0.03314190222110658, - "acc_norm": 0.42152466367713004, - "acc_norm_stderr": 0.03314190222110658 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.41414141414141414, - "acc_stderr": 0.03509438348879629, - "acc_norm": 0.41414141414141414, - "acc_norm_stderr": 0.03509438348879629 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3724137931034483, - "acc_stderr": 0.0402873153294756, - "acc_norm": 0.3724137931034483, - "acc_norm_stderr": 0.0402873153294756 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.031357095996135904, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.031357095996135904 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26153846153846155, - "acc_stderr": 0.022282141204204433, - "acc_norm": 0.26153846153846155, - "acc_norm_stderr": 0.022282141204204433 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.02967833314144444, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.02967833314144444 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36774193548387096, - "acc_stderr": 0.02743086657997347, - "acc_norm": 0.36774193548387096, - "acc_norm_stderr": 0.02743086657997347 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5042735042735043, - "acc_stderr": 0.03275489264382132, - "acc_norm": 0.5042735042735043, - "acc_norm_stderr": 0.03275489264382132 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33962264150943394, - "acc_stderr": 0.029146904747798352, - "acc_norm": 0.33962264150943394, - "acc_norm_stderr": 0.029146904747798352 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.046313813194254635, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.046313813194254635 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 
0.23178807947019867, - "acc_stderr": 0.03445406271987054, - "acc_norm": 0.23178807947019867, - "acc_norm_stderr": 0.03445406271987054 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43283582089552236, - "acc_stderr": 0.0350349092367328, - "acc_norm": 0.43283582089552236, - "acc_norm_stderr": 0.0350349092367328 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.03496101481191181, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.03496101481191181 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2328042328042328, - "acc_stderr": 0.021765961672154537, - "acc_norm": 0.2328042328042328, - "acc_norm_stderr": 0.021765961672154537 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.039420826399272135, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.039420826399272135 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.45664739884393063, - "acc_stderr": 0.02681771813034892, - "acc_norm": 0.45664739884393063, - "acc_norm_stderr": 0.02681771813034892 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.02716368603827123, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.02716368603827123 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.44041450777202074, - "acc_stderr": 0.035827245300360945, - "acc_norm": 0.44041450777202074, - "acc_norm_stderr": 0.035827245300360945 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.04096985139843671, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.04096985139843671 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3798165137614679, - "acc_stderr": 0.020808825617866244, - "acc_norm": 0.3798165137614679, - "acc_norm_stderr": 0.020808825617866244 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924318, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924318 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.02818059632825929, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.02818059632825929 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.48760330578512395, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.48760330578512395, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119667, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119667 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32189542483660133, - "acc_stderr": 0.018901015322093095, - 
"acc_norm": 0.32189542483660133, - "acc_norm_stderr": 0.018901015322093095 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320203, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320203 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.042466243366976256, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.042466243366976256 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.032568505702936464, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.032568505702936464 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961443, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961443 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.03004261583271486, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.03004261583271486 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065686, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065686 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.42616033755274263, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.42616033755274263, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.33376792698826596, - "acc_stderr": 0.012043812655846146, - "acc_norm": 0.33376792698826596, - "acc_norm_stderr": 0.012043812655846146 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.033744993563193555, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.033744993563193555 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3575757575757576, - "acc_stderr": 0.03742597043806587, - "acc_norm": 0.3575757575757576, - "acc_norm_stderr": 0.03742597043806587 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2484700122399021, - "mc1_stderr": 0.015127427096520688, - "mc2": 0.3821911392219441, - "mc2_stderr": 0.014928316371274168 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5692488262910798, - "acc_stderr": 0.01697459912173145, - "acc_norm": 0.6326291079812206, - "acc_norm_stderr": 0.016525787977931604 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - 
"harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/COLA_LO-7B", - "model_sha": "4cccb5249ea36f58588c32fe58c6f104f89f0487", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json b/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json deleted file mode 100644 index 65e9e3a874f43d2cd4063b694a18e2db5d36a3a7..0000000000000000000000000000000000000000 --- a/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.25597269624573377, - "acc_stderr": 0.012753013241244513, - "acc_norm": 0.30716723549488056, - "acc_norm_stderr": 0.013481034054980945 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3408683529177455, - "acc_stderr": 0.00473032455662415, - "acc_norm": 0.4153555068711412, - "acc_norm_stderr": 0.004917761181740164 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.25146198830409355, - "acc_stderr": 0.033275044238468436, - "acc_norm": 0.25146198830409355, - "acc_norm_stderr": 0.033275044238468436 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.041858325989283136, - "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.041858325989283136 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.21966794380587484, - "acc_stderr": 0.01480538447837116, - "acc_norm": 0.21966794380587484, - "acc_norm_stderr": 0.01480538447837116 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.24444444444444444, - "acc_stderr": 
0.03712537833614866, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.03712537833614866 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.22127659574468084, - "acc_stderr": 0.027136349602424063, - "acc_norm": 0.22127659574468084, - "acc_norm_stderr": 0.027136349602424063 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.26506024096385544, - "acc_stderr": 0.03436024037944968, - "acc_norm": 0.26506024096385544, - "acc_norm_stderr": 0.03436024037944968 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24758842443729903, - "acc_stderr": 0.024513879973621967, - "acc_norm": 0.24758842443729903, - "acc_norm_stderr": 0.024513879973621967 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.25112107623318386, - "acc_stderr": 0.029105220833224605, - "acc_norm": 0.25112107623318386, - "acc_norm_stderr": 0.029105220833224605 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909281, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909281 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23737373737373738, - "acc_stderr": 0.03031371053819888, - "acc_norm": 0.23737373737373738, - "acc_norm_stderr": 0.03031371053819888 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2206896551724138, - "acc_stderr": 0.034559302019248124, - "acc_norm": 0.2206896551724138, - "acc_norm_stderr": 0.034559302019248124 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.13725490196078433, - "acc_stderr": 0.03424084669891521, - "acc_norm": 0.13725490196078433, - "acc_norm_stderr": 0.03424084669891521 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2794871794871795, - "acc_stderr": 0.022752388839776826, - "acc_norm": 0.2794871794871795, - "acc_norm_stderr": 0.022752388839776826 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.13, - "acc_stderr": 0.033799766898963086, - "acc_norm": 0.13, - "acc_norm_stderr": 0.033799766898963086 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.18518518518518517, - "acc_stderr": 0.03755265865037181, - "acc_norm": 0.18518518518518517, - "acc_norm_stderr": 0.03755265865037181 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.030903796952114485, - "acc_norm": 0.26108374384236455, - "acc_norm_stderr": 0.030903796952114485 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042767, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042767 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004264, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21132075471698114, - "acc_stderr": 0.02512576648482784, - "acc_norm": 
0.21132075471698114, - "acc_norm_stderr": 0.02512576648482784 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.0449429086625209, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.0449429086625209 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712166, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712166 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.030769444967296014, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.030769444967296014 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889904, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889904 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24338624338624337, - "acc_stderr": 0.02210112878741543, - "acc_norm": 0.24338624338624337, - "acc_norm_stderr": 0.02210112878741543 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25153374233128833, - "acc_stderr": 0.03408997886857529, - "acc_norm": 0.25153374233128833, - "acc_norm_stderr": 0.03408997886857529 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.27469135802469136, - "acc_stderr": 0.024836057868294677, - "acc_norm": 0.27469135802469136, - "acc_norm_stderr": 0.024836057868294677 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27461139896373055, - "acc_stderr": 0.03221024508041154, - "acc_norm": 0.27461139896373055, - "acc_norm_stderr": 0.03221024508041154 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23486238532110093, - "acc_stderr": 0.018175110510343602, - "acc_norm": 0.23486238532110093, - "acc_norm_stderr": 0.018175110510343602 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.025646863097137904, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.025646863097137904 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 
0.04512608598542128 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2066115702479339, - "acc_stderr": 0.03695980128098824, - "acc_norm": 0.2066115702479339, - "acc_norm_stderr": 0.03695980128098824 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21895424836601307, - "acc_stderr": 0.01672993756553755, - "acc_norm": 0.21895424836601307, - "acc_norm_stderr": 0.01672993756553755 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.026011992930902002, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.026011992930902002 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.15178571428571427, - "acc_stderr": 0.034057028381856924, - "acc_norm": 0.15178571428571427, - "acc_norm_stderr": 0.034057028381856924 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.27314814814814814, - "acc_stderr": 0.03038805130167812, - "acc_norm": 0.27314814814814814, - "acc_norm_stderr": 0.03038805130167812 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25027932960893856, - "acc_stderr": 0.014487500852850417, - "acc_norm": 0.25027932960893856, - "acc_norm_stderr": 0.014487500852850417 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403326, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403326 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.026799562024887685, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.026799562024887685 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22448979591836735, - "acc_stderr": 0.02671143055553839, - "acc_norm": 0.22448979591836735, - "acc_norm_stderr": 0.02671143055553839 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.23628691983122363, - "acc_stderr": 0.027652153144159267, - "acc_norm": 0.23628691983122363, - "acc_norm_stderr": 0.027652153144159267 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2653194263363755, - "acc_stderr": 0.011276198843958873, - "acc_norm": 0.2653194263363755, - "acc_norm_stderr": 0.011276198843958873 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693268, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693268 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03453131801885415, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03453131801885415 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.01505186948671501, - "mc2": 0.41023662722679205, - "mc2_stderr": 0.016160843398647234 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.38028169014084506, - "acc_stderr": 0.016641217297503577, - "acc_norm": 0.4460093896713615, - "acc_norm_stderr": 0.017039561832563676 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, 
- "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/KOAT-5.8b", - "model_sha": "768c40d2ffbddbc8aa15eed33234eef248eb43e7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json b/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json deleted file mode 100644 index 849380275c02c8f7118401b2f25c31921d21492e..0000000000000000000000000000000000000000 --- a/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27986348122866894, - "acc_stderr": 0.013119040897725923, - "acc_norm": 0.3455631399317406, - "acc_norm_stderr": 0.013896938461145687 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36566421031667, - "acc_stderr": 0.0048063163427093936, - "acc_norm": 0.48466440948018324, - "acc_norm_stderr": 0.004987433862274562 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.39766081871345027, - "acc_stderr": 0.0375363895576169, - "acc_norm": 0.39766081871345027, - "acc_norm_stderr": 
0.0375363895576169 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41890166028097064, - "acc_stderr": 0.017643205052377185, - "acc_norm": 0.41890166028097064, - "acc_norm_stderr": 0.017643205052377185 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424004, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424004 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28936170212765955, - "acc_stderr": 0.029644006577009618, - "acc_norm": 0.28936170212765955, - "acc_norm_stderr": 0.029644006577009618 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.027648149599751464, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.027648149599751464 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3838383838383838, - "acc_stderr": 0.03464881675016339, - "acc_norm": 0.3838383838383838, - "acc_norm_stderr": 0.03464881675016339 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617746, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617746 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3865546218487395, - "acc_stderr": 0.0316314580755238, - "acc_norm": 0.3865546218487395, - "acc_norm_stderr": 0.0316314580755238 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3076923076923077, - "acc_stderr": 0.02340092891831049, - "acc_norm": 0.3076923076923077, - "acc_norm_stderr": 0.02340092891831049 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760628, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760628 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 
0.3709677419354839, - "acc_stderr": 0.02748054188795359, - "acc_norm": 0.3709677419354839, - "acc_norm_stderr": 0.02748054188795359 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4829059829059829, - "acc_stderr": 0.032736940493481824, - "acc_norm": 0.4829059829059829, - "acc_norm_stderr": 0.032736940493481824 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3471698113207547, - "acc_stderr": 0.02930010170554965, - "acc_norm": 0.3471698113207547, - "acc_norm_stderr": 0.02930010170554965 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.33636363636363636, - "acc_stderr": 0.04525393596302506, - "acc_norm": 0.33636363636363636, - "acc_norm_stderr": 0.04525393596302506 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.02578787422095932, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.02578787422095932 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119996, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119996 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4228855721393035, - "acc_stderr": 0.03493231777421282, - "acc_norm": 0.4228855721393035, - "acc_norm_stderr": 0.03493231777421282 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267437, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267437 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.0220190800122179, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.0220190800122179 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.38439306358381503, - "acc_stderr": 0.026189666966272035, - "acc_norm": 0.38439306358381503, - "acc_norm_stderr": 0.026189666966272035 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724145, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724145 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.027163686038271226, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.027163686038271226 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38860103626943004, - "acc_stderr": 0.03517739796373132, - "acc_norm": 0.38860103626943004, - "acc_norm_stderr": 0.03517739796373132 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512322, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512322 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3779816513761468, - "acc_stderr": 0.02078918706672811, - "acc_norm": 0.3779816513761468, - "acc_norm_stderr": 0.02078918706672811 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 
0.03893259610604672, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604672 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.028491993586171566, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.028491993586171566 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5041322314049587, - "acc_stderr": 0.045641987674327526, - "acc_norm": 0.5041322314049587, - "acc_norm_stderr": 0.045641987674327526 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.32894736842105265, - "acc_stderr": 0.03823428969926605, - "acc_norm": 0.32894736842105265, - "acc_norm_stderr": 0.03823428969926605 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.018718067052623216, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.018718067052623216 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.027889139300534778, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.027889139300534778 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.03236585252602157, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.03236585252602157 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261446, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261446 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.029896163033125474, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.029896163033125474 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3142857142857143, - "acc_stderr": 0.029719329422417468, - "acc_norm": 0.3142857142857143, - "acc_norm_stderr": 0.029719329422417468 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.45147679324894513, - "acc_stderr": 0.0323936001739747, - "acc_norm": 0.45147679324894513, - "acc_norm_stderr": 0.0323936001739747 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3135593220338983, - "acc_stderr": 0.01184923429145932, - "acc_norm": 0.3135593220338983, - "acc_norm_stderr": 0.01184923429145932 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.03384132045674118, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.03384132045674118 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3878787878787879, - "acc_stderr": 0.038049136539710114, - "acc_norm": 0.3878787878787879, - "acc_norm_stderr": 0.038049136539710114 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2386780905752754, - "mc1_stderr": 0.014922629695456411, - "mc2": 0.3826229918315052, - "mc2_stderr": 0.015120737226444851 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.32981220657276994, - "acc_stderr": 0.01611635552339568, - "acc_norm": 
0.3967136150234742, - "acc_norm_stderr": 0.01677009546349846 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/ZeroCoka-7B", - "model_sha": "3025135b08f7d052531fcd8f6a4a5a97e4e25c76", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json b/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json deleted file mode 100644 index afd8dad87aa39279b87a4ccc0003c8eff27860eb..0000000000000000000000000000000000000000 --- a/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35238907849829354, - "acc_stderr": 0.01396014260059869, - 
"acc_norm": 0.3984641638225256, - "acc_norm_stderr": 0.014306946052735569 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3679545907189803, - "acc_stderr": 0.0048126332800782715, - "acc_norm": 0.46932881896036643, - "acc_norm_stderr": 0.004980384575535391 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47368421052631576, - "acc_stderr": 0.038295098689947286, - "acc_norm": 0.47368421052631576, - "acc_norm_stderr": 0.038295098689947286 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5728155339805825, - "acc_stderr": 0.04897957737781168, - "acc_norm": 0.5728155339805825, - "acc_norm_stderr": 0.04897957737781168 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.46871008939974457, - "acc_stderr": 0.01784491809046854, - "acc_norm": 0.46871008939974457, - "acc_norm_stderr": 0.01784491809046854 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.40425531914893614, - "acc_stderr": 0.03208115750788684, - "acc_norm": 0.40425531914893614, - "acc_norm_stderr": 0.03208115750788684 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.037891344246115496, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.037891344246115496 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.44694533762057875, - "acc_stderr": 0.028237769422085335, - "acc_norm": 0.44694533762057875, - "acc_norm_stderr": 0.028237769422085335 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.42152466367713004, - "acc_stderr": 0.03314190222110656, - "acc_norm": 0.42152466367713004, - "acc_norm_stderr": 0.03314190222110656 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5404040404040404, - "acc_stderr": 0.035507024651313425, - "acc_norm": 0.5404040404040404, - "acc_norm_stderr": 0.035507024651313425 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3931034482758621, - "acc_stderr": 0.0407032901370707, - "acc_norm": 0.3931034482758621, - "acc_norm_stderr": 0.0407032901370707 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.0322529423239964, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.0322529423239964 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.45897435897435895, - "acc_stderr": 0.025265525491284295, - "acc_norm": 0.45897435897435895, - "acc_norm_stderr": 0.025265525491284295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.45, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.45, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4483870967741935, - "acc_stderr": 0.02829205683011273, - "acc_norm": 0.4483870967741935, - "acc_norm_stderr": 0.02829205683011273 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6025641025641025, - "acc_stderr": 0.03205953453789293, - "acc_norm": 0.6025641025641025, - "acc_norm_stderr": 0.03205953453789293 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4528301886792453, - "acc_stderr": 0.030635627957961823, - "acc_norm": 0.4528301886792453, - "acc_norm_stderr": 0.030635627957961823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3296296296296296, - "acc_stderr": 0.028661201116524586, - "acc_norm": 0.3296296296296296, - "acc_norm_stderr": 0.028661201116524586 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3443708609271523, - "acc_stderr": 0.03879687024073327, - "acc_norm": 0.3443708609271523, - "acc_norm_stderr": 0.03879687024073327 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.03614665424180826, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.03614665424180826 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.32275132275132273, - "acc_stderr": 0.024078943243597016, - "acc_norm": 0.32275132275132273, - "acc_norm_stderr": 0.024078943243597016 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3958333333333333, - "acc_stderr": 0.04089465449325582, - "acc_norm": 0.3958333333333333, - "acc_norm_stderr": 0.04089465449325582 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5173410404624278, - "acc_stderr": 0.026902900458666647, - "acc_norm": 0.5173410404624278, - "acc_norm_stderr": 0.026902900458666647 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.38650306748466257, - "acc_stderr": 0.03825825548848607, - "acc_norm": 0.38650306748466257, - "acc_norm_stderr": 0.03825825548848607 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.027607914087400473, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.027607914087400473 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5025906735751295, - "acc_stderr": 0.03608390745384487, - "acc_norm": 0.5025906735751295, - "acc_norm_stderr": 0.03608390745384487 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.20175438596491227, - "acc_stderr": 0.03775205013583639, - "acc_norm": 0.20175438596491227, - "acc_norm_stderr": 0.03775205013583639 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46972477064220186, - "acc_stderr": 0.021397988604936965, - "acc_norm": 0.46972477064220186, - "acc_norm_stderr": 0.021397988604936965 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.40476190476190477, - "acc_stderr": 0.04390259265377563, - "acc_norm": 0.40476190476190477, - "acc_norm_stderr": 0.04390259265377563 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.028555827516528777, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.028555827516528777 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5289256198347108, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.5289256198347108, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.35526315789473684, - "acc_stderr": 0.03894734487013317, - "acc_norm": 0.35526315789473684, - "acc_norm_stderr": 0.03894734487013317 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3284313725490196, - "acc_stderr": 0.018999707383162666, - "acc_norm": 0.3284313725490196, - "acc_norm_stderr": 0.018999707383162666 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.028267657482650147, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.028267657482650147 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.03256850570293648, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.03256850570293648 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2927374301675978, - "acc_stderr": 0.015218109544410182, - "acc_norm": 0.2927374301675978, - "acc_norm_stderr": 0.015218109544410182 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.39705882352941174, - "acc_stderr": 0.029722152099280065, - "acc_norm": 0.39705882352941174, - "acc_norm_stderr": 0.029722152099280065 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5142857142857142, - "acc_stderr": 0.03199615232806287, - "acc_norm": 0.5142857142857142, - "acc_norm_stderr": 0.03199615232806287 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5569620253164557, - "acc_stderr": 0.03233532777533485, - "acc_norm": 0.5569620253164557, - "acc_norm_stderr": 0.03233532777533485 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3239895697522816, - "acc_stderr": 0.011952840809646563, - "acc_norm": 0.3239895697522816, - "acc_norm_stderr": 0.011952840809646563 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.03484941514429231, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.03484941514429231 
- }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.32068543451652387, - "mc1_stderr": 0.0163391703732809, - "mc2": 0.498111749136946, - "mc2_stderr": 0.015897921630313217 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.318075117370892, - "acc_stderr": 0.015964978456287866, - "acc_norm": 0.32981220657276994, - "acc_norm_stderr": 0.016116355523395676 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "DopeorNope/Zero_COKE_K-13B", - "model_sha": "fda4838dd7feb06c1289ae143810c67a59a72961", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No 
newline at end of file diff --git a/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json deleted file mode 100644 index 956771dcd3160fc0b8bb2af06730f4e62950a263..0000000000000000000000000000000000000000 --- a/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2235494880546075, - "acc_stderr": 0.012174896631202605, - "acc_norm": 0.2815699658703072, - "acc_norm_stderr": 0.013143376735009015 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3345947022505477, - "acc_stderr": 0.004708842600177431, - "acc_norm": 0.4135630352519418, - "acc_norm_stderr": 0.0049146550633294974 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.03424042924691585, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.03424042924691585 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26947637292464877, - "acc_stderr": 0.015866243073215065, - "acc_norm": 0.26947637292464877, - "acc_norm_stderr": 0.015866243073215065 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.038201699145179055, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.038201699145179055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2127659574468085, - "acc_stderr": 0.026754391348039783, - "acc_norm": 0.2127659574468085, - "acc_norm_stderr": 0.026754391348039783 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.033293941190735296, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.033293941190735296 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2379421221864952, - "acc_stderr": 0.024185150647818707, - "acc_norm": 0.2379421221864952, - "acc_norm_stderr": 0.024185150647818707 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508766, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508766 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.21374045801526717, - "acc_stderr": 0.0359546161177469, - "acc_norm": 0.21374045801526717, - "acc_norm_stderr": 0.0359546161177469 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.03074630074212451, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.03074630074212451 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2205128205128205, - "acc_stderr": 0.02102067268082791, - "acc_norm": 0.2205128205128205, - "acc_norm_stderr": 0.02102067268082791 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293752, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293752 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3, - "acc_stderr": 0.02606936229533513, - "acc_norm": 0.3, - "acc_norm_stderr": 0.02606936229533513 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.027601921381417607, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.027601921381417607 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 0.026880647889051968, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889051968 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.04172343038705383, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.04172343038705383 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.02784081149587194, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.02784081149587194 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943342, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943342 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.03096590312357303, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.03096590312357303 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.03186209851641144, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.03186209851641144 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.31901840490797545, - "acc_stderr": 0.03661997551073836, - "acc_norm": 0.31901840490797545, - "acc_norm_stderr": 0.03661997551073836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2623456790123457, - "acc_stderr": 0.024477222856135114, - "acc_norm": 
0.2623456790123457, - "acc_norm_stderr": 0.024477222856135114 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.33678756476683935, - "acc_stderr": 0.03410780251836184, - "acc_norm": 0.33678756476683935, - "acc_norm_stderr": 0.03410780251836184 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.20175438596491227, - "acc_stderr": 0.037752050135836386, - "acc_norm": 0.20175438596491227, - "acc_norm_stderr": 0.037752050135836386 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24220183486238533, - "acc_stderr": 0.01836817630659862, - "acc_norm": 0.24220183486238533, - "acc_norm_stderr": 0.01836817630659862 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.03764950879790606, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.03764950879790606 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.024288619466046102, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.024288619466046102 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.256198347107438, - "acc_stderr": 0.039849796533028704, - "acc_norm": 0.256198347107438, - "acc_norm_stderr": 0.039849796533028704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.21710526315789475, - "acc_stderr": 0.033550453048829226, - "acc_norm": 0.21710526315789475, - "acc_norm_stderr": 0.033550453048829226 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.01728276069516743, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.01728276069516743 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.03400603625538272, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.03400603625538272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.030161911930767102, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.030161911930767102 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3795918367346939, - "acc_stderr": 0.03106721126287249, - "acc_norm": 0.3795918367346939, - "acc_norm_stderr": 0.03106721126287249 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2109704641350211, - "acc_stderr": 0.02655837250266192, - "acc_norm": 
0.2109704641350211, - "acc_norm_stderr": 0.02655837250266192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23468057366362452, - "acc_stderr": 0.010824026872449344, - "acc_norm": 0.23468057366362452, - "acc_norm_stderr": 0.010824026872449344 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.22424242424242424, - "acc_stderr": 0.03256866661681102, - "acc_norm": 0.22424242424242424, - "acc_norm_stderr": 0.03256866661681102 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25091799265605874, - "mc1_stderr": 0.015176985027707682, - "mc2": 0.4116568832959107, - "mc2_stderr": 0.015044504977529799 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.12206572769953052, - "acc_stderr": 0.011221814716156896, - "acc_norm": 0.16901408450704225, - "acc_norm_stderr": 0.01284675672446505 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - 
"harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "EleutherAI/polyglot-ko-1.3b", - "model_sha": "557e162cf6e944fdbae05bab2e45d066a125eacb", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json b/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json deleted file mode 100644 index 94b01714ac7a483cf56127d45d174d64235db8bd..0000000000000000000000000000000000000000 --- a/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2858361774744027, - "acc_stderr": 0.013203196088537365, - "acc_norm": 0.33532423208191126, - "acc_norm_stderr": 0.013796182947785562 - }, - "harness|ko_hellaswag|10": { - "acc": 0.385381398127863, - "acc_stderr": 0.004856906473719383, - "acc_norm": 0.5027882891854212, - "acc_norm_stderr": 0.004989703824167094 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393161, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393161 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.015671006009339572, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.015671006009339572 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2127659574468085, - "acc_stderr": 0.026754391348039787, - "acc_norm": 0.2127659574468085, - "acc_norm_stderr": 0.026754391348039787 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.31189710610932475, - "acc_stderr": 0.02631185807185416, - "acc_norm": 0.31189710610932475, - "acc_norm_stderr": 0.02631185807185416 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.20179372197309417, - "acc_stderr": 0.02693611191280227, - "acc_norm": 0.20179372197309417, - "acc_norm_stderr": 0.02693611191280227 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.03053289223393203, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.03053289223393203 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438014, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 
0.03780019230438014 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.04617034827006716, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.04617034827006716 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868963, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868963 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21025641025641026, - "acc_stderr": 0.020660597485026928, - "acc_norm": 0.21025641025641026, - "acc_norm_stderr": 0.020660597485026928 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.0309037969521145, - "acc_norm": 0.26108374384236455, - "acc_norm_stderr": 0.0309037969521145 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239963, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239963 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.027601921381417604, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.027601921381417604 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23773584905660378, - "acc_stderr": 0.026199808807561932, - "acc_norm": 0.23773584905660378, - "acc_norm_stderr": 0.026199808807561932 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.03115715086935554, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.03115715086935554 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776578, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776578 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.036539469694421, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.036539469694421 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 
0.23121387283236994, - "acc_stderr": 0.022698657167855716, - "acc_norm": 0.23121387283236994, - "acc_norm_stderr": 0.022698657167855716 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.024922001168886338, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.024922001168886338 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2694300518134715, - "acc_stderr": 0.03201867122877794, - "acc_norm": 0.2694300518134715, - "acc_norm_stderr": 0.03201867122877794 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26422018348623855, - "acc_stderr": 0.0189041641715102, - "acc_norm": 0.26422018348623855, - "acc_norm_stderr": 0.0189041641715102 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.19047619047619047, - "acc_stderr": 0.035122074123020534, - "acc_norm": 0.19047619047619047, - "acc_norm_stderr": 0.035122074123020534 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2581699346405229, - "acc_stderr": 0.02505850331695815, - "acc_norm": 0.2581699346405229, - "acc_norm_stderr": 0.02505850331695815 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.20098039215686275, - "acc_stderr": 0.016211938889655574, - "acc_norm": 0.20098039215686275, - "acc_norm_stderr": 0.016211938889655574 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22695035460992907, - "acc_stderr": 0.02498710636564298, - "acc_norm": 0.22695035460992907, - "acc_norm_stderr": 0.02498710636564298 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.03275773486100999, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.03275773486100999 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 
0.030187532060329383, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.030187532060329383 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22857142857142856, - "acc_stderr": 0.026882144922307748, - "acc_norm": 0.22857142857142856, - "acc_norm_stderr": 0.026882144922307748 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.32489451476793246, - "acc_stderr": 0.030486039389105303, - "acc_norm": 0.32489451476793246, - "acc_norm_stderr": 0.030486039389105303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25684485006518903, - "acc_stderr": 0.011158455853098857, - "acc_norm": 0.25684485006518903, - "acc_norm_stderr": 0.011158455853098857 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.031493281045079556, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.031493281045079556 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.032876667586034886, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.032876667586034886 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2350061199510404, - "mc1_stderr": 0.014843061507731613, - "mc2": 0.390667104295536, - "mc2_stderr": 0.014736649975849761 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.49413145539906106, - "acc_stderr": 0.017138598632436254, - "acc_norm": 0.5868544600938967, - "acc_norm_stderr": 0.016879203885533163 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - 
"harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "EleutherAI/polyglot-ko-12.8b", - "model_sha": "09dfc839067bf44e7f52976eca8adbc17f04e1b0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json b/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json deleted file mode 100644 index e7e4f0de318ba9bb5b7da9fa3aa2cbf8d48067d2..0000000000000000000000000000000000000000 --- a/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2525597269624573, - "acc_stderr": 0.01269672898020771, - "acc_norm": 0.3046075085324232, - "acc_norm_stderr": 0.013449522109932494 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3511252738498307, - "acc_stderr": 0.004763465139038552, - "acc_norm": 0.4420434176458873, - "acc_norm_stderr": 0.004956147046108961 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03188578017686398, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03188578017686398 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.045416094465039476, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.045416094465039476 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23627075351213284, - "acc_stderr": 0.015190473717037497, - "acc_norm": 0.23627075351213284, - "acc_norm_stderr": 0.015190473717037497 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2, - "acc_stderr": 0.026148818018424502, - "acc_norm": 0.2, - "acc_norm_stderr": 0.026148818018424502 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 0.03384429155233135, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 0.03384429155233135 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24437299035369775, - "acc_stderr": 0.024406162094668886, - "acc_norm": 0.24437299035369775, - "acc_norm_stderr": 0.024406162094668886 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21524663677130046, - "acc_stderr": 0.027584066602208263, - "acc_norm": 0.21524663677130046, - "acc_norm_stderr": 0.027584066602208263 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.19083969465648856, - "acc_stderr": 0.034465133507525954, - "acc_norm": 0.19083969465648856, - "acc_norm_stderr": 0.034465133507525954 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.21, - "acc_stderr": 
0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.31313131313131315, - "acc_stderr": 0.033042050878136525, - "acc_norm": 0.31313131313131315, - "acc_norm_stderr": 0.033042050878136525 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.024283140529467295, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.024283140529467295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.03194740072265541, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.03194740072265541 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3032258064516129, - "acc_stderr": 0.026148685930671742, - "acc_norm": 0.3032258064516129, - "acc_norm_stderr": 0.026148685930671742 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3283018867924528, - "acc_stderr": 0.02890159361241178, - "acc_norm": 0.3283018867924528, - "acc_norm_stderr": 0.02890159361241178 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721376, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721376 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371216, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371216 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658753, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.03822746937658753 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736412, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736412 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, 
- "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2398843930635838, - "acc_stderr": 0.022989592543123567, - "acc_norm": 0.2398843930635838, - "acc_norm_stderr": 0.022989592543123567 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.024659685185967277, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.024659685185967277 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3626943005181347, - "acc_stderr": 0.034697137917043715, - "acc_norm": 0.3626943005181347, - "acc_norm_stderr": 0.034697137917043715 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25688073394495414, - "acc_stderr": 0.018732492928342448, - "acc_norm": 0.25688073394495414, - "acc_norm_stderr": 0.018732492928342448 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795132, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795132 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.024954184324879905, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.024954184324879905 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.1652892561983471, - "acc_stderr": 0.03390780612972776, - "acc_norm": 0.1652892561983471, - "acc_norm_stderr": 0.03390780612972776 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.03715062154998904, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.03715062154998904 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21895424836601307, - "acc_stderr": 0.016729937565537537, - "acc_norm": 0.21895424836601307, - "acc_norm_stderr": 0.016729937565537537 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290396, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.15178571428571427, - "acc_stderr": 0.03405702838185692, - "acc_norm": 0.15178571428571427, - "acc_norm_stderr": 0.03405702838185692 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26145251396648045, - "acc_stderr": 0.014696599650364546, - 
"acc_norm": 0.26145251396648045, - "acc_norm_stderr": 0.014696599650364546 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.030187532060329383, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.030187532060329383 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.23628691983122363, - "acc_stderr": 0.02765215314415926, - "acc_norm": 0.23628691983122363, - "acc_norm_stderr": 0.02765215314415926 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24445893089960888, - "acc_stderr": 0.010976425013113912, - "acc_norm": 0.24445893089960888, - "acc_norm_stderr": 0.010976425013113912 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.03096451792692341, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.03096451792692341 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.296969696969697, - "acc_stderr": 0.03567969772268046, - "acc_norm": 0.296969696969697, - "acc_norm_stderr": 0.03567969772268046 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24969400244798043, - "mc1_stderr": 0.015152286907148125, - "mc2": 0.40454723614569765, - "mc2_stderr": 0.014981033793701278 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.05046948356807512, - "acc_stderr": 0.007504195050541823, - "acc_norm": 0.09507042253521127, - "acc_norm_stderr": 0.010054612173655424 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - 
"harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "EleutherAI/polyglot-ko-3.8b", - "model_sha": "3c696a71c16b4a4622b7cabf6c5da4ba5a73b548", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json deleted file mode 100644 index d0784f0b67cc88b6c63da950b3f21e8450bc52b3..0000000000000000000000000000000000000000 --- a/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2687713310580205, - "acc_stderr": 0.012955065963710675, - "acc_norm": 0.32764505119453924, - "acc_norm_stderr": 0.013715847940719339 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3690499900418243, - "acc_stderr": 0.004815613144385398, - "acc_norm": 0.4814777932682733, - "acc_norm_stderr": 0.004986356526063965 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.04750458399041693, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.04750458399041693 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20178799489144317, - "acc_stderr": 0.014351702181636861, - "acc_norm": 0.20178799489144317, - "acc_norm_stderr": 0.014351702181636861 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.17446808510638298, - "acc_stderr": 0.02480944233550398, - "acc_norm": 0.17446808510638298, - "acc_norm_stderr": 0.02480944233550398 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.030709824050565264, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.030709824050565264 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 
0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.11659192825112108, - "acc_stderr": 0.021539639816244467, - "acc_norm": 0.11659192825112108, - "acc_norm_stderr": 0.021539639816244467 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.03915345408847835, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.03915345408847835 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.048108401480826346, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.048108401480826346 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3641025641025641, - "acc_stderr": 0.024396672985094778, - "acc_norm": 0.3641025641025641, - "acc_norm_stderr": 0.024396672985094778 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493864, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493864 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 
0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.022860838309232072, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.022860838309232072 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3504587155963303, - "acc_stderr": 0.02045607759982446, - "acc_norm": 0.3504587155963303, - "acc_norm_stderr": 0.02045607759982446 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.026090162504279053, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.026090162504279053 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.14049586776859505, - "acc_stderr": 0.031722334260021585, - "acc_norm": 0.14049586776859505, - "acc_norm_stderr": 0.031722334260021585 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953776, - "acc_norm": 0.24113475177304963, - 
"acc_norm_stderr": 0.02551873104953776 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16964285714285715, - "acc_stderr": 0.03562367850095391, - "acc_norm": 0.16964285714285715, - "acc_norm_stderr": 0.03562367850095391 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536934, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536934 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37142857142857144, - "acc_stderr": 0.030932858792789834, - "acc_norm": 0.37142857142857144, - "acc_norm_stderr": 0.030932858792789834 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20253164556962025, - "acc_stderr": 0.026160568246601464, - "acc_norm": 0.20253164556962025, - "acc_norm_stderr": 0.026160568246601464 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2438070404172099, - "acc_stderr": 0.010966507972178475, - "acc_norm": 0.2438070404172099, - "acc_norm_stderr": 0.010966507972178475 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693257, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693257 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.03477691162163659, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.03477691162163659 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2533659730722154, - "mc1_stderr": 0.01522589934082683, - "mc2": 0.3923103125697379, - "mc2_stderr": 0.014648106435610566 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1795774647887324, - "acc_stderr": 0.013157698435457041, - "acc_norm": 0.2312206572769953, - "acc_norm_stderr": 0.014452713321605408 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - 
"harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "EleutherAI/polyglot-ko-5.8b", - "model_sha": "581a4c3eebfac23536b3c9676bcfb05c6a97baa2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json b/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json deleted file mode 100644 index 90cd93e1c63ad0e9ee2be35435894bd040b665e7..0000000000000000000000000000000000000000 --- a/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19539249146757678, - "acc_stderr": 0.011586907189952911, - "acc_norm": 0.2636518771331058, - "acc_norm_stderr": 0.012875929151297047 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2660824536944832, - "acc_stderr": 0.004410047530835032, - "acc_norm": 0.2788289185421231, - "acc_norm_stderr": 0.004475067344626752 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.04541609446503949, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.04541609446503949 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2720306513409962, - "acc_stderr": 0.015913367447500524, - "acc_norm": 0.2720306513409962, - "acc_norm_stderr": 0.015913367447500524 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2, - "acc_stderr": 0.034554737023254366, - "acc_norm": 0.2, - "acc_norm_stderr": 0.034554737023254366 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 
0.047258156262526045 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342347, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342347 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25903614457831325, - "acc_stderr": 0.034106466140718564, - "acc_norm": 0.25903614457831325, - "acc_norm_stderr": 0.034106466140718564 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.28938906752411575, - "acc_stderr": 0.025755865922632945, - "acc_norm": 0.28938906752411575, - "acc_norm_stderr": 0.025755865922632945 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2914798206278027, - "acc_stderr": 0.030500283176545902, - "acc_norm": 0.2914798206278027, - "acc_norm_stderr": 0.030500283176545902 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.21374045801526717, - "acc_stderr": 0.0359546161177469, - "acc_norm": 0.21374045801526717, - "acc_norm_stderr": 0.0359546161177469 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124505, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124505 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2689655172413793, - "acc_stderr": 0.036951833116502325, - "acc_norm": 0.2689655172413793, - "acc_norm_stderr": 0.036951833116502325 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.030778057422931666, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.030778057422931666 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.023454674889404285, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.023454674889404285 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553873, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553873 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23773584905660378, - "acc_stderr": 0.02619980880756193, - "acc_norm": 0.23773584905660378, - "acc_norm_stderr": 0.02619980880756193 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.04172343038705383, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.04172343038705383 
- }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.027840811495871937, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.027840811495871937 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.20398009950248755, - "acc_stderr": 0.02849317624532609, - "acc_norm": 0.20398009950248755, - "acc_norm_stderr": 0.02849317624532609 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.03126511206173044, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.03126511206173044 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.022894082489925992, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.022894082489925992 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.02456922360046085, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.02456922360046085 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.03324837939758159, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.03324837939758159 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24220183486238533, - "acc_stderr": 0.01836817630659862, - "acc_norm": 0.24220183486238533, - "acc_norm_stderr": 0.01836817630659862 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.03455071019102148, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.03455071019102148 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.27124183006535946, - "acc_stderr": 0.025457756696667874, - "acc_norm": 0.27124183006535946, - "acc_norm_stderr": 0.025457756696667874 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3305785123966942, - "acc_stderr": 0.04294340845212095, - "acc_norm": 0.3305785123966942, - "acc_norm_stderr": 0.04294340845212095 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 
0.3092105263157895, - "acc_stderr": 0.037610708698674805, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.037610708698674805 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148594, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148594 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.0258921511567094, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.0258921511567094 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.17857142857142858, - "acc_stderr": 0.036352091215778065, - "acc_norm": 0.17857142857142858, - "acc_norm_stderr": 0.036352091215778065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4398148148148148, - "acc_stderr": 0.033851779760448106, - "acc_norm": 0.4398148148148148, - "acc_norm_stderr": 0.033851779760448106 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.01431099954796145, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.01431099954796145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4338235294117647, - "acc_stderr": 0.03010563657001664, - "acc_norm": 0.4338235294117647, - "acc_norm_stderr": 0.03010563657001664 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142804, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142804 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2320675105485232, - "acc_stderr": 0.02747974455080851, - "acc_norm": 0.2320675105485232, - "acc_norm_stderr": 0.02747974455080851 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23663624511082137, - "acc_stderr": 0.010855137351572742, - "acc_norm": 0.23663624511082137, - "acc_norm_stderr": 0.010855137351572742 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591362, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591362 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.03317505930009179, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.03317505930009179 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842892, - "mc2": 0.5367542106571858, - "mc2_stderr": 0.01635449255335969 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.11267605633802817, - "acc_stderr": 0.010839072955995904, - "acc_norm": 0.36032863849765256, - "acc_norm_stderr": 0.016457469695705128 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 
1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/llama-2-ko-plain", - "model_sha": "091fe3550bfa49baaebda838c10825484580f89d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json b/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json deleted file mode 100644 index 55b52acb4fe3d0d93562f3e14146f7b357e36166..0000000000000000000000000000000000000000 --- a/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19795221843003413, - "acc_stderr": 0.011643990971573401, - "acc_norm": 0.26535836177474403, - "acc_norm_stderr": 0.012902554762313962 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2633937462656841, - "acc_stderr": 0.004395739495688583, - "acc_norm": 0.27823142800239, - "acc_norm_stderr": 0.004472121485161932 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.20388349514563106, - "acc_stderr": 0.03989139859531771, - "acc_norm": 0.20388349514563106, - "acc_norm_stderr": 0.03989139859531771 - }, - 
"harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2669220945083014, - "acc_stderr": 0.015818450894777552, - "acc_norm": 0.2669220945083014, - "acc_norm_stderr": 0.015818450894777552 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.03633384414073463, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.03633384414073463 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.028185441301234102, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.028185441301234102 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.29518072289156627, - "acc_stderr": 0.0355092018568963, - "acc_norm": 0.29518072289156627, - "acc_norm_stderr": 0.0355092018568963 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26688102893890675, - "acc_stderr": 0.025122637608816657, - "acc_norm": 0.26688102893890675, - "acc_norm_stderr": 0.025122637608816657 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2556053811659193, - "acc_stderr": 0.029275891003969923, - "acc_norm": 0.2556053811659193, - "acc_norm_stderr": 0.029275891003969923 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969195, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969195 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.03074630074212451, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.03074630074212451 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2620689655172414, - "acc_stderr": 0.036646663372252565, - "acc_norm": 0.2620689655172414, - "acc_norm_stderr": 0.036646663372252565 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307811, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307811 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.030778057422931666, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.030778057422931666 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.30512820512820515, - "acc_stderr": 0.023346335293325887, - "acc_norm": 0.30512820512820515, - "acc_norm_stderr": 0.023346335293325887 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.32019704433497537, - "acc_stderr": 0.032826493853041504, - "acc_norm": 0.32019704433497537, - "acc_norm_stderr": 0.032826493853041504 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.31290322580645163, - "acc_stderr": 0.026377567028645858, - "acc_norm": 0.31290322580645163, - "acc_norm_stderr": 0.026377567028645858 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 
0.19230769230769232, - "acc_stderr": 0.025819233256483727, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483727 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22641509433962265, - "acc_stderr": 0.025757559893106727, - "acc_norm": 0.22641509433962265, - "acc_norm_stderr": 0.025757559893106727 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721376, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721376 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409224, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409224 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113946, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113946 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.022894082489925992, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.022894082489925992 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.02456922360046085, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.02456922360046085 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.03324837939758159, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.03324837939758159 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23302752293577983, - "acc_stderr": 0.018125669180861514, - "acc_norm": 0.23302752293577983, - "acc_norm_stderr": 0.018125669180861514 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333337, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333337 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 
0.24183006535947713, - "acc_stderr": 0.024518195641879334, - "acc_norm": 0.24183006535947713, - "acc_norm_stderr": 0.024518195641879334 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.04173349148083497, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.04173349148083497 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23026315789473684, - "acc_stderr": 0.034260594244031654, - "acc_norm": 0.23026315789473684, - "acc_norm_stderr": 0.034260594244031654 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21895424836601307, - "acc_stderr": 0.016729937565537537, - "acc_norm": 0.21895424836601307, - "acc_norm_stderr": 0.016729937565537537 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290396, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4398148148148148, - "acc_stderr": 0.033851779760448106, - "acc_norm": 0.4398148148148148, - "acc_norm_stderr": 0.033851779760448106 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.01440029642922561, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.01440029642922561 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.03018753206032938, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.03018753206032938 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2320675105485232, - "acc_stderr": 0.02747974455080851, - "acc_norm": 0.2320675105485232, - "acc_norm_stderr": 0.02747974455080851 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2301173402868318, - "acc_stderr": 0.010750183177375553, - "acc_norm": 0.2301173402868318, - "acc_norm_stderr": 0.010750183177375553 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.22424242424242424, - "acc_stderr": 0.03256866661681102, - "acc_norm": 0.22424242424242424, - "acc_norm_stderr": 0.03256866661681102 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.015616518497219381, - "mc2": 0.538620436654127, - "mc2_stderr": 0.016366108934105512 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.09976525821596244, - "acc_stderr": 0.010273129672385398, - "acc_norm": 0.3380281690140845, - "acc_norm_stderr": 0.016215540194273178 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - 
"harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/llama-ko-7b", - "model_sha": "c1f0b9f20d38c9494e1607bd30ce43da570d9d52", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json b/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json deleted file mode 100644 index a23b43af60937cf5376b82d1e793bfb701e9013c..0000000000000000000000000000000000000000 --- a/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19539249146757678, - "acc_stderr": 0.01158690718995291, - "acc_norm": 0.2619453924914676, - "acc_norm_stderr": 0.012849054826858112 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2642899820752838, - "acc_stderr": 
0.00440053218855021, - "acc_norm": 0.27763393746265685, - "acc_norm_stderr": 0.00446916572860033 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161549, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161549 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2681992337164751, - "acc_stderr": 0.015842430835269438, - "acc_norm": 0.2681992337164751, - "acc_norm_stderr": 0.015842430835269438 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2074074074074074, - "acc_stderr": 0.03502553170678316, - "acc_norm": 0.2074074074074074, - "acc_norm_stderr": 0.03502553170678316 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514203, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514203 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.0357160923005348, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.0357160923005348 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2797427652733119, - "acc_stderr": 0.02549425935069491, - "acc_norm": 0.2797427652733119, - "acc_norm_stderr": 0.02549425935069491 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.23318385650224216, - "acc_stderr": 0.028380391147094716, - "acc_norm": 0.23318385650224216, - "acc_norm_stderr": 0.028380391147094716 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969195, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969195 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932032, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932032 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307811, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307811 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566545, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566545 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3282051282051282, - "acc_stderr": 0.02380763319865727, - "acc_norm": 0.3282051282051282, - "acc_norm_stderr": 0.02380763319865727 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 
0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553873, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553873 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23018867924528302, - "acc_stderr": 0.025907897122408173, - "acc_norm": 0.23018867924528302, - "acc_norm_stderr": 0.025907897122408173 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724138, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724138 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371217, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371217 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658754, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.03822746937658754 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22388059701492538, - "acc_stderr": 0.0294752502360172, - "acc_norm": 0.22388059701492538, - "acc_norm_stderr": 0.0294752502360172 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.03126511206173043, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.03126511206173043 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.03514697467862388, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.03514697467862388 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.022894082489925992, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.022894082489925992 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02438366553103545, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02438366553103545 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.032396370467357015, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.032396370467357015 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 
0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24036697247706423, - "acc_stderr": 0.01832060732096407, - "acc_norm": 0.24036697247706423, - "acc_norm_stderr": 0.01832060732096407 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.032684540130117436, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.032684540130117436 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.0248480182638752, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.0248480182638752 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2892561983471074, - "acc_stderr": 0.04139112727635464, - "acc_norm": 0.2892561983471074, - "acc_norm_stderr": 0.04139112727635464 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2238562091503268, - "acc_stderr": 0.016863008585416617, - "acc_norm": 0.2238562091503268, - "acc_norm_stderr": 0.016863008585416617 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290396, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.03362277436608043, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.03362277436608043 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25139664804469275, - "acc_stderr": 0.014508979453553983, - "acc_norm": 0.25139664804469275, - "acc_norm_stderr": 0.014508979453553983 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.030161911930767102, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.030161911930767102 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.02812342933514279, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.02812342933514279 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22362869198312235, - "acc_stderr": 0.027123298205229972, - "acc_norm": 0.22362869198312235, - "acc_norm_stderr": 0.027123298205229972 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2333767926988266, - "acc_stderr": 0.010803108481179088, - "acc_norm": 0.2333767926988266, - "acc_norm_stderr": 0.010803108481179088 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604243, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604243 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.03287666758603489, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.03287666758603489 - }, - 
"harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.015616518497219385, - "mc2": 0.5382255654218452, - "mc2_stderr": 0.01636582464762524 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.11267605633802817, - "acc_stderr": 0.010839072955995904, - "acc_norm": 0.3615023474178404, - "acc_norm_stderr": 0.01646912149043007 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/llama-m", - "model_sha": "7c06c7acb6bd18e1cf52846483e430def93686f2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json b/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json deleted file mode 100644 index 
9c4328c0285923944233bb05a7f01929bfa8fcd8..0000000000000000000000000000000000000000 --- a/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3395904436860068, - "acc_stderr": 0.013839039762820169, - "acc_norm": 0.39590443686006827, - "acc_norm_stderr": 0.014291228393536588 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38856801433977295, - "acc_stderr": 0.004864286176731823, - "acc_norm": 0.5073690499900418, - "acc_norm_stderr": 0.004989239462835233 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.391812865497076, - "acc_stderr": 0.037439798259263996, - "acc_norm": 0.391812865497076, - "acc_norm_stderr": 0.037439798259263996 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3946360153256705, - "acc_stderr": 0.017478464305911545, - "acc_norm": 0.3946360153256705, - "acc_norm_stderr": 0.017478464305911545 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102956, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102956 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.03571609230053481, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.03571609230053481 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4115755627009646, - "acc_stderr": 0.027950481494401266, - "acc_norm": 0.4115755627009646, - "acc_norm_stderr": 0.027950481494401266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.032277904428505, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.032277904428505 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.42748091603053434, - "acc_stderr": 0.043389203057924, - "acc_norm": 0.42748091603053434, - "acc_norm_stderr": 0.043389203057924 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138621, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138621 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.13725490196078433, - "acc_stderr": 0.03424084669891523, - "acc_norm": 0.13725490196078433, - "acc_norm_stderr": 0.03424084669891523 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.030684737115135367, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.030684737115135367 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.258974358974359, - "acc_stderr": 0.02221110681006167, - "acc_norm": 0.258974358974359, - "acc_norm_stderr": 0.02221110681006167 - }, - "harness|ko_mmlu_computer_security|5": { 
- "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.04732332615978814, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.04732332615978814 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3258064516129032, - "acc_stderr": 0.026662010578567104, - "acc_norm": 0.3258064516129032, - "acc_norm_stderr": 0.026662010578567104 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5512820512820513, - "acc_stderr": 0.032583346493868806, - "acc_norm": 0.5512820512820513, - "acc_norm_stderr": 0.032583346493868806 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.35094339622641507, - "acc_stderr": 0.029373646253234686, - "acc_norm": 0.35094339622641507, - "acc_norm_stderr": 0.029373646253234686 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.046737523336702384, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.046737523336702384 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073828, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073828 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.1986754966887417, - "acc_stderr": 0.032578473844367746, - "acc_norm": 0.1986754966887417, - "acc_norm_stderr": 0.032578473844367746 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4427860696517413, - "acc_stderr": 0.03512310964123936, - "acc_norm": 0.4427860696517413, - "acc_norm_stderr": 0.03512310964123936 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.0356760379963917, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.0356760379963917 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.022860838309232072, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.022860838309232072 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488583, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488583 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3901734104046243, - "acc_stderr": 0.026261677607806642, - "acc_norm": 0.3901734104046243, - "acc_norm_stderr": 0.026261677607806642 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.03731133519673893, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.03731133519673893 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.02716368603827123, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.02716368603827123 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 
0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32642487046632124, - "acc_stderr": 0.033840286211432945, - "acc_norm": 0.32642487046632124, - "acc_norm_stderr": 0.033840286211432945 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3743119266055046, - "acc_stderr": 0.02074895940898831, - "acc_norm": 0.3743119266055046, - "acc_norm_stderr": 0.02074895940898831 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.03764950879790604, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.03764950879790604 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.028275490156791434, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.028275490156791434 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.045077322787750874, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.045077322787750874 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.039889037033362836, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.039889037033362836 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.019524316744866346, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.019524316744866346 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697624, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697624 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.27314814814814814, - "acc_stderr": 0.030388051301678116, - "acc_norm": 0.27314814814814814, - "acc_norm_stderr": 0.030388051301678116 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098424, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098424 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.31985294117647056, - "acc_stderr": 0.02833295951403124, - "acc_norm": 0.31985294117647056, - "acc_norm_stderr": 0.02833295951403124 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37551020408163266, - "acc_stderr": 0.03100120903989484, - "acc_norm": 0.37551020408163266, - "acc_norm_stderr": 0.03100120903989484 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5232067510548524, - "acc_stderr": 0.032512152011410174, - "acc_norm": 0.5232067510548524, - "acc_norm_stderr": 0.032512152011410174 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2985658409387223, - "acc_stderr": 0.011688060141794208, - "acc_norm": 0.2985658409387223, - 
"acc_norm_stderr": 0.011688060141794208 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.0381549430868893, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.0381549430868893 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24969400244798043, - "mc1_stderr": 0.015152286907148125, - "mc2": 0.38092210327853554, - "mc2_stderr": 0.014881931344043989 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.47417840375586856, - "acc_stderr": 0.017116907933735912, - "acc_norm": 0.5586854460093896, - "acc_norm_stderr": 0.017021311671847467 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/llama-p", - 
"model_sha": "e54c345988c60cdafe797a2f15e916801ee4ab7b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json b/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json deleted file mode 100644 index dbccb2e2f5dee8747eefe10e4ae6f436b88cce88..0000000000000000000000000000000000000000 --- a/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20136518771331058, - "acc_stderr": 0.011718927477444262, - "acc_norm": 0.2636518771331058, - "acc_norm_stderr": 0.01287592915129705 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2665803624775941, - "acc_stderr": 0.004412674170976469, - "acc_norm": 0.27922724556861184, - "acc_norm_stderr": 0.004477025762200596 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2046783625730994, - "acc_stderr": 0.03094445977853321, - "acc_norm": 0.2046783625730994, - "acc_norm_stderr": 0.03094445977853321 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.04541609446503949, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.04541609446503949 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2656449553001277, - "acc_stderr": 0.01579430248788873, - "acc_norm": 0.2656449553001277, - "acc_norm_stderr": 0.01579430248788873 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2074074074074074, - "acc_stderr": 0.03502553170678316, - "acc_norm": 0.2074074074074074, - "acc_norm_stderr": 0.03502553170678316 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.028185441301234113, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.028185441301234113 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.034605799075530255, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.034605799075530255 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2958199356913183, - "acc_stderr": 0.02592237178881877, - "acc_norm": 0.2958199356913183, - "acc_norm_stderr": 0.02592237178881877 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.23766816143497757, - "acc_stderr": 0.028568079464714267, - "acc_norm": 0.23766816143497757, - "acc_norm_stderr": 0.028568079464714267 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932032, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932032 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727771, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727771 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416542, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416542 - }, - 
"harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.03068473711513536, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.03068473711513536 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3153846153846154, - "acc_stderr": 0.02355964698318994, - "acc_norm": 0.3153846153846154, - "acc_norm_stderr": 0.02355964698318994 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.31290322580645163, - "acc_stderr": 0.026377567028645858, - "acc_norm": 0.31290322580645163, - "acc_norm_stderr": 0.026377567028645858 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.025819233256483727, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483727 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2339622641509434, - "acc_stderr": 0.02605529690115292, - "acc_norm": 0.2339622641509434, - "acc_norm_stderr": 0.02605529690115292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724138, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724138 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712177, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712177 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.19402985074626866, - "acc_stderr": 0.027962677604768893, - "acc_norm": 0.19402985074626866, - "acc_norm_stderr": 0.027962677604768893 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.0326926380614177, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.0326926380614177 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566018, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566018 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071134, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071134 - }, - 
"harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2822085889570552, - "acc_stderr": 0.03536117886664743, - "acc_norm": 0.2822085889570552, - "acc_norm_stderr": 0.03536117886664743 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02438366553103545, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02438366553103545 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2849740932642487, - "acc_stderr": 0.0325771407770966, - "acc_norm": 0.2849740932642487, - "acc_norm_stderr": 0.0325771407770966 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23119266055045873, - "acc_stderr": 0.018075750241633163, - "acc_norm": 0.23119266055045873, - "acc_norm_stderr": 0.018075750241633163 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333337, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333337 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.024954184324879912, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.024954184324879912 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.34710743801652894, - "acc_stderr": 0.04345724570292534, - "acc_norm": 0.34710743801652894, - "acc_norm_stderr": 0.04345724570292534 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.26973684210526316, - "acc_stderr": 0.036117805602848975, - "acc_norm": 0.26973684210526316, - "acc_norm_stderr": 0.036117805602848975 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22712418300653595, - "acc_stderr": 0.016949853279212373, - "acc_norm": 0.22712418300653595, - "acc_norm_stderr": 0.016949853279212373 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953777, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953777 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.033812000056435254, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.033812000056435254 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24916201117318434, - "acc_stderr": 0.01446589382985993, - "acc_norm": 0.24916201117318434, - "acc_norm_stderr": 0.01446589382985993 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.030187532060329383, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.030187532060329383 - }, - 
"harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22784810126582278, - "acc_stderr": 0.02730348459906942, - "acc_norm": 0.22784810126582278, - "acc_norm_stderr": 0.02730348459906942 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23728813559322035, - "acc_stderr": 0.010865436690780272, - "acc_norm": 0.23728813559322035, - "acc_norm_stderr": 0.010865436690780272 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139404, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139404 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.01561651849721938, - "mc2": 0.5406294687690661, - "mc2_stderr": 0.016334114258114155 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.12441314553990611, - "acc_stderr": 0.011314046818595224, - "acc_norm": 0.3615023474178404, - "acc_norm_stderr": 0.01646912149043007 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - 
"harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/llama-r", - "model_sha": "6bdde9a227da60c2db803024d5b2e3a53a41cf0b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/FINDA-FIT/xllama-instruct/result_2023-10-01 07:23:53.json b/FINDA-FIT/xllama-instruct/result_2023-10-01 07:23:53.json deleted file mode 100644 index dfa246d4db956057e01d60d722282c49d7dd76d4..0000000000000000000000000000000000000000 --- a/FINDA-FIT/xllama-instruct/result_2023-10-01 07:23:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3387372013651877, - "acc_stderr": 0.013830568927974334, - "acc_norm": 0.3924914675767918, - "acc_norm_stderr": 0.01426963463567071 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3910575582553276, - "acc_stderr": 0.004869899297734548, - "acc_norm": 0.5143397729535949, - "acc_norm_stderr": 0.004987728900897584 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.036996580176568775, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.036996580176568775 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161549, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161549 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39208173690932313, - "acc_stderr": 0.01745852405014764, - "acc_norm": 0.39208173690932313, - "acc_norm_stderr": 0.01745852405014764 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.042446332383532286, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.042446332383532286 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342343, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342343 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.03610805018031024, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.03610805018031024 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.43086816720257237, - "acc_stderr": 0.028125340983972714, - "acc_norm": 0.43086816720257237, - "acc_norm_stderr": 0.028125340983972714 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.36771300448430494, - "acc_stderr": 0.03236198350928275, - "acc_norm": 0.36771300448430494, - "acc_norm_stderr": 0.03236198350928275 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3893129770992366, - "acc_stderr": 0.04276486542814591, - "acc_norm": 0.3893129770992366, - "acc_norm_stderr": 0.04276486542814591 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 
0.35858585858585856, - "acc_stderr": 0.0341690364039152, - "acc_norm": 0.35858585858585856, - "acc_norm_stderr": 0.0341690364039152 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.04043461861916747, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.04043461861916747 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.10784313725490197, - "acc_stderr": 0.03086428212206014, - "acc_norm": 0.10784313725490197, - "acc_norm_stderr": 0.03086428212206014 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.030388353551886845, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.030388353551886845 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.0224212736129237, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.0224212736129237 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.047500773411999854, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.047500773411999854 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.02645087448904277, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.02645087448904277 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4700854700854701, - "acc_stderr": 0.03269741106812443, - "acc_norm": 0.4700854700854701, - "acc_norm_stderr": 0.03269741106812443 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33962264150943394, - "acc_stderr": 0.029146904747798345, - "acc_norm": 0.33962264150943394, - "acc_norm_stderr": 0.029146904747798345 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.04461272175910507, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.04461272175910507 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945277, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945277 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.39800995024875624, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.39800995024875624, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.035149425512674394, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.035149425512674394 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194978, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194978 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - 
"acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43641618497109824, - "acc_stderr": 0.026700545424943687, - "acc_norm": 0.43641618497109824, - "acc_norm_stderr": 0.026700545424943687 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.03559039531617342, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.03559039531617342 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3950617283950617, - "acc_stderr": 0.027201117666925657, - "acc_norm": 0.3950617283950617, - "acc_norm_stderr": 0.027201117666925657 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3160621761658031, - "acc_stderr": 0.033553973696861736, - "acc_norm": 0.3160621761658031, - "acc_norm_stderr": 0.033553973696861736 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3779816513761468, - "acc_stderr": 0.020789187066728113, - "acc_norm": 0.3779816513761468, - "acc_norm_stderr": 0.020789187066728113 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047182, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047182 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.02803609227389176, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.02803609227389176 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5537190082644629, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.5537190082644629, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3815789473684211, - "acc_stderr": 0.03953173377749194, - "acc_norm": 0.3815789473684211, - "acc_norm_stderr": 0.03953173377749194 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.01871806705262322, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.01871806705262322 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320203, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320203 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.0432704093257873, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.0432704093257873 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.032365852526021574, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.032365852526021574 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961441, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961441 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - 
"acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.028418208619406794, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.028418208619406794 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.31020408163265306, - "acc_stderr": 0.029613459872484378, - "acc_norm": 0.31020408163265306, - "acc_norm_stderr": 0.029613459872484378 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.032498227183013026, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.032498227183013026 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.0115903755547331, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.0115903755547331 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.03198001660115072, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.03198001660115072 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3696969696969697, - "acc_stderr": 0.037694303145125674, - "acc_norm": 0.3696969696969697, - "acc_norm_stderr": 0.037694303145125674 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26560587515299877, - "mc1_stderr": 0.015461027627253597, - "mc2": 0.40727214174838056, - "mc2_stderr": 0.014940202090745085 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3204225352112676, - "acc_stderr": 0.015996178088626918, - "acc_norm": 0.4307511737089202, - "acc_norm_stderr": 0.016974599121731444 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 
1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "FINDA-FIT/xllama-instruct", - "model_sha": "1e5ee340d5f4558a1bc451ba7942fa5f3a1c8d80", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json b/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json deleted file mode 100644 index fc0183418c63fbdff9bc87513bd124e3eaa4e890..0000000000000000000000000000000000000000 --- a/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3728668941979522, - "acc_stderr": 0.01413117676013117, - "acc_norm": 0.42406143344709896, - "acc_norm_stderr": 0.014441889627464394 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40689105755825533, - "acc_stderr": 0.004902502514738606, - "acc_norm": 0.5433180641306513, - "acc_norm_stderr": 0.004971019942726589 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.038331852752130254, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.038331852752130254 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.44660194174757284, - "acc_stderr": 0.04922424153458933, - "acc_norm": 0.44660194174757284, - "acc_norm_stderr": 0.04922424153458933 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4878671775223499, - "acc_stderr": 0.01787469866749135, - "acc_norm": 0.4878671775223499, - "acc_norm_stderr": 0.01787469866749135 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3148936170212766, - "acc_stderr": 0.030363582197238167, - "acc_norm": 0.3148936170212766, - "acc_norm_stderr": 0.030363582197238167 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4397590361445783, - "acc_stderr": 0.03864139923699122, - "acc_norm": 0.4397590361445783, - "acc_norm_stderr": 0.03864139923699122 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.028345045864840678, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.028345045864840678 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 
0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48091603053435117, - "acc_stderr": 0.04382094705550989, - "acc_norm": 0.48091603053435117, - "acc_norm_stderr": 0.04382094705550989 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3724137931034483, - "acc_stderr": 0.0402873153294756, - "acc_norm": 0.3724137931034483, - "acc_norm_stderr": 0.0402873153294756 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.03135709599613591, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.03135709599613591 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.358974358974359, - "acc_stderr": 0.02432173848460237, - "acc_norm": 0.358974358974359, - "acc_norm_stderr": 0.02432173848460237 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051449, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051449 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45483870967741935, - "acc_stderr": 0.028327743091561053, - "acc_norm": 0.45483870967741935, - "acc_norm_stderr": 0.028327743091561053 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5683760683760684, - "acc_stderr": 0.0324483553531149, - "acc_norm": 0.5683760683760684, - "acc_norm_stderr": 0.0324483553531149 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4528301886792453, - "acc_stderr": 0.03063562795796182, - "acc_norm": 0.4528301886792453, - "acc_norm_stderr": 0.03063562795796182 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073828, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073828 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5472636815920398, - "acc_stderr": 0.03519702717576915, - "acc_norm": 0.5472636815920398, - "acc_norm_stderr": 0.03519702717576915 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.03656343653353159, - "acc_norm": 0.3583815028901734, - 
"acc_norm_stderr": 0.03656343653353159 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463084, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463084 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4190751445086705, - "acc_stderr": 0.02656417811142262, - "acc_norm": 0.4190751445086705, - "acc_norm_stderr": 0.02656417811142262 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.03731133519673893, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.03731133519673893 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4660493827160494, - "acc_stderr": 0.027756535257347666, - "acc_norm": 0.4660493827160494, - "acc_norm_stderr": 0.027756535257347666 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40932642487046633, - "acc_stderr": 0.03548608168860806, - "acc_norm": 0.40932642487046633, - "acc_norm_stderr": 0.03548608168860806 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.44954128440366975, - "acc_stderr": 0.021327881417823363, - "acc_norm": 0.44954128440366975, - "acc_norm_stderr": 0.021327881417823363 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604675, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604675 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.027826109307283693, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.027826109307283693 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5206611570247934, - "acc_stderr": 0.04560456086387235, - "acc_norm": 0.5206611570247934, - "acc_norm_stderr": 0.04560456086387235 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336284, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336284 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.018771683893528176, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.018771683893528176 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2907801418439716, - "acc_stderr": 0.027090664368353178, - "acc_norm": 0.2907801418439716, - "acc_norm_stderr": 0.027090664368353178 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, 
- "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.03099866630456053, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.03099866630456053 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.02439819298665492, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.02439819298665492 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.43673469387755104, - "acc_stderr": 0.031751952375833226, - "acc_norm": 0.43673469387755104, - "acc_norm_stderr": 0.031751952375833226 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4810126582278481, - "acc_stderr": 0.03252375148090448, - "acc_norm": 0.4810126582278481, - "acc_norm_stderr": 0.03252375148090448 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29726205997392435, - "acc_stderr": 0.011673346173086034, - "acc_norm": 0.29726205997392435, - "acc_norm_stderr": 0.011673346173086034 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.03384132045674118, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.03384132045674118 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.46060606060606063, - "acc_stderr": 0.03892207016552013, - "acc_norm": 0.46060606060606063, - "acc_norm_stderr": 0.03892207016552013 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2558139534883721, - "mc1_stderr": 0.015274176219283347, - "mc2": 0.41687077666896594, - "mc2_stderr": 0.014804732810744745 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5363849765258216, - "acc_stderr": 0.017094337456326263, - "acc_norm": 0.6373239436619719, - "acc_norm_stderr": 0.016480666823965075 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - 
"harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v3", - "model_sha": "da615711850b1e6c1deb1a9c8dab9476a19df855", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json b/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json deleted file mode 100644 index 0409e7bb00089bbd847e47f622d61a642cd990c4..0000000000000000000000000000000000000000 --- a/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28242320819112626, - "acc_stderr": 0.01315545688409722, - "acc_norm": 0.3302047781569966, - "acc_norm_stderr": 0.013743085603760422 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3719378609838678, - "acc_stderr": 0.004823341569605419, - "acc_norm": 0.4821748655646286, - "acc_norm_stderr": 0.0049866095427490405 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.04750458399041693, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.04750458399041693 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20051085568326948, - "acc_stderr": 0.014317653708594209, - "acc_norm": 0.20051085568326948, - "acc_norm_stderr": 0.014317653708594209 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.16170212765957448, - "acc_stderr": 0.02406850528969531, - "acc_norm": 0.16170212765957448, - "acc_norm_stderr": 0.02406850528969531 - }, - 
"harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.030709824050565264, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.030709824050565264 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.11659192825112108, - "acc_stderr": 0.021539639816244467, - "acc_norm": 0.11659192825112108, - "acc_norm_stderr": 0.021539639816244467 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.04784060704105653, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.04784060704105653 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.024359581465396983, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.024359581465396983 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.02825420034443866, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.02825420034443866 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - 
"harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409217, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409217 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2724867724867725, - "acc_stderr": 0.02293097307163334, - "acc_norm": 0.2724867724867725, - "acc_norm_stderr": 0.02293097307163334 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.344954128440367, - "acc_stderr": 0.02038060540506697, - "acc_norm": 0.344954128440367, - "acc_norm_stderr": 0.02038060540506697 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.02609016250427905, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.02609016250427905 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.15702479338842976, - "acc_stderr": 0.03321244842547128, - "acc_norm": 0.15702479338842976, - "acc_norm_stderr": 0.03321244842547128 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953776, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953776 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.14285714285714285, - "acc_stderr": 0.033213611069662696, - "acc_norm": 0.14285714285714285, - "acc_norm_stderr": 0.033213611069662696 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036847, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036847 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3183673469387755, - "acc_stderr": 0.02982253379398209, - "acc_norm": 0.3183673469387755, - "acc_norm_stderr": 0.02982253379398209 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20675105485232068, - "acc_stderr": 0.026361651668389087, - "acc_norm": 0.20675105485232068, - "acc_norm_stderr": 0.026361651668389087 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2470664928292047, - "acc_stderr": 0.011015752255279329, - "acc_norm": 0.2470664928292047, - "acc_norm_stderr": 0.011015752255279329 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.03477691162163659, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.03477691162163659 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25458996328029376, - "mc1_stderr": 0.015250117079156475, - "mc2": 0.3974526680083883, - "mc2_stderr": 0.01475058288914894 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18779342723004694, - "acc_stderr": 0.013387782981513264, - "acc_norm": 0.23943661971830985, - "acc_norm_stderr": 0.014628446638821336 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, 
- "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HAERAE-HUB/hae-tae_v0.1.1", - "model_sha": "4ae77d9659bb11f158180f4b8b243d1e9ddb51f4", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json b/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json deleted file mode 100644 index b5ca8c596054f253b90cf77627521844efc1385e..0000000000000000000000000000000000000000 --- a/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2909556313993174, - "acc_stderr": 0.01327307786590758, - "acc_norm": 0.3302047781569966, - "acc_norm_stderr": 0.013743085603760427 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37442740489942244, - "acc_stderr": 0.004829856058603579, - "acc_norm": 0.481876120294762, - "acc_norm_stderr": 0.00498650229693118 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.04802694698258973, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.04802694698258973 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20434227330779056, - "acc_stderr": 0.014419123980931906, - "acc_norm": 0.20434227330779056, - "acc_norm_stderr": 0.014419123980931906 - }, - "harness|ko_mmlu_anatomy|5": { - 
"acc": 0.21481481481481482, - "acc_stderr": 0.03547854198560826, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.03547854198560826 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.15, - "acc_stderr": 0.035887028128263714, - "acc_norm": 0.15, - "acc_norm_stderr": 0.035887028128263714 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838746, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838746 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.030709824050565264, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.030709824050565264 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.1031390134529148, - "acc_stderr": 0.020412564289839272, - "acc_norm": 0.1031390134529148, - "acc_norm_stderr": 0.020412564289839272 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.03383201223244441, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.03383201223244441 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.048108401480826346, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.048108401480826346 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3641025641025641, - "acc_stderr": 0.024396672985094778, - "acc_norm": 0.3641025641025641, - "acc_norm_stderr": 0.024396672985094778 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.025819233256483724, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483724 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2943396226415094, - "acc_stderr": 
0.028049186315695248, - "acc_norm": 0.2943396226415094, - "acc_norm_stderr": 0.028049186315695248 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.026466117538959905, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.026466117538959905 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.022644212615525214, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525214 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.344954128440367, - "acc_stderr": 0.02038060540506697, - "acc_norm": 0.344954128440367, - "acc_norm_stderr": 0.02038060540506697 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.041905964388711366, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.041905964388711366 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.026090162504279053, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.026090162504279053 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 
0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.14049586776859505, - "acc_stderr": 0.031722334260021585, - "acc_norm": 0.14049586776859505, - "acc_norm_stderr": 0.031722334260021585 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.016819028375736386, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.016819028375736386 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.02564555362226673, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.02564555362226673 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.034859460964757394, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.034859460964757394 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3346938775510204, - "acc_stderr": 0.030209235226242314, - "acc_norm": 0.3346938775510204, - "acc_norm_stderr": 0.030209235226242314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.19831223628691982, - "acc_stderr": 0.02595502084162111, - "acc_norm": 0.19831223628691982, - "acc_norm_stderr": 0.02595502084162111 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24902216427640156, - "acc_stderr": 0.01104489226404077, - "acc_norm": 0.24902216427640156, - "acc_norm_stderr": 0.01104489226404077 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842885, - "mc2": 0.420854027075679, - "mc2_stderr": 0.014933313137954875 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.284037558685446, - "acc_stderr": 0.01545853115904392, - "acc_norm": 0.3474178403755869, - "acc_norm_stderr": 0.016322206819108925 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - 
"harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HAERAE-HUB/hae-tae_v0.1.2", - "model_sha": "fd9094c0e91bcb07ecf2b89b36a16480e27a93dc", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json b/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json deleted file mode 100644 index b6ca4089ed67f533ce5d3a7088186f1736290958..0000000000000000000000000000000000000000 --- a/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36945392491467577, - "acc_stderr": 0.014104578366491888, - "acc_norm": 0.42150170648464164, - "acc_norm_stderr": 0.014430197069326028 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40450109539932283, - "acc_stderr": 0.004897921845492105, - "acc_norm": 0.5392352121091416, - "acc_norm_stderr": 0.004974395131539592 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 
0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458934, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458934 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5070242656449553, - "acc_stderr": 0.017878199003432214, - "acc_norm": 0.5070242656449553, - "acc_norm_stderr": 0.017878199003432214 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.43703703703703706, - "acc_stderr": 0.04284958639753399, - "acc_norm": 0.43703703703703706, - "acc_norm_stderr": 0.04284958639753399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.03097669299853443, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.03097669299853443 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.037400593820293204, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.037400593820293204 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4983922829581994, - "acc_stderr": 0.02839794490780661, - "acc_norm": 0.4983922829581994, - "acc_norm_stderr": 0.02839794490780661 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4977578475336323, - "acc_stderr": 0.03355746535223263, - "acc_norm": 0.4977578475336323, - "acc_norm_stderr": 0.03355746535223263 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.0354413249194797, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.0354413249194797 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364396, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364396 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.032339434681820885, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.032339434681820885 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4230769230769231, - "acc_stderr": 0.025049197876042335, - "acc_norm": 0.4230769230769231, - "acc_norm_stderr": 0.025049197876042335 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760626, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760626 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.034051553805619514, - "acc_norm": 
0.37438423645320196, - "acc_norm_stderr": 0.034051553805619514 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.028206225591502744, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.028206225591502744 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6452991452991453, - "acc_stderr": 0.03134250486245402, - "acc_norm": 0.6452991452991453, - "acc_norm_stderr": 0.03134250486245402 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4226415094339623, - "acc_stderr": 0.03040233144576954, - "acc_norm": 0.4226415094339623, - "acc_norm_stderr": 0.03040233144576954 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.028317533496066475, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.028317533496066475 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5522388059701493, - "acc_stderr": 0.035161847729521675, - "acc_norm": 0.5522388059701493, - "acc_norm_stderr": 0.035161847729521675 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.0372424959581773, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.0372424959581773 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.02487081525105709, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.02487081525105709 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.6, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.6, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4595375722543353, - "acc_stderr": 0.02683080599895224, - "acc_norm": 0.4595375722543353, - "acc_norm_stderr": 0.02683080599895224 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4539877300613497, - "acc_stderr": 0.0391170190467718, - "acc_norm": 0.4539877300613497, - "acc_norm_stderr": 0.0391170190467718 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.027777777777777797, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.027777777777777797 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48704663212435234, - "acc_stderr": 0.03607228061047749, - "acc_norm": 0.48704663212435234, - "acc_norm_stderr": 0.03607228061047749 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0383515395439942, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0383515395439942 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5412844036697247, - "acc_stderr": 0.021364122533881695, - "acc_norm": 0.5412844036697247, - 
"acc_norm_stderr": 0.021364122533881695 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.42063492063492064, - "acc_stderr": 0.04415438226743744, - "acc_norm": 0.42063492063492064, - "acc_norm_stderr": 0.04415438226743744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.42810457516339867, - "acc_stderr": 0.028332397483664278, - "acc_norm": 0.42810457516339867, - "acc_norm_stderr": 0.028332397483664278 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.019488025745529675, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.019488025745529675 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.33687943262411346, - "acc_stderr": 0.02819553487396673, - "acc_norm": 0.33687943262411346, - "acc_norm_stderr": 0.02819553487396673 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.042878587513404544, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.042878587513404544 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.375, - "acc_stderr": 0.033016908987210894, - "acc_norm": 0.375, - "acc_norm_stderr": 0.033016908987210894 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33455882352941174, - "acc_stderr": 0.02866199620233531, - "acc_norm": 0.33455882352941174, - "acc_norm_stderr": 0.02866199620233531 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5654008438818565, - "acc_stderr": 0.03226759995510145, - "acc_norm": 0.5654008438818565, - "acc_norm_stderr": 0.03226759995510145 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3559322033898305, - "acc_stderr": 0.012228645537277573, - "acc_norm": 0.3559322033898305, - "acc_norm_stderr": 0.012228645537277573 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45588235294117646, - "acc_stderr": 0.03495624522015474, - "acc_norm": 0.45588235294117646, - "acc_norm_stderr": 0.03495624522015474 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.503030303030303, - "acc_stderr": 0.03904272341431855, - "acc_norm": 0.503030303030303, - "acc_norm_stderr": 0.03904272341431855 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775527, - "mc2": 0.4355517094226067, - "mc2_stderr": 
0.015309009273280678 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5915492957746479, - "acc_stderr": 0.016850023674109642, - "acc_norm": 0.6854460093896714, - "acc_norm_stderr": 0.015917301615490653 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HumanF-MarkrAI/pub-llama-13B-v3", - "model_sha": "a077b211925e00e7bd8e3f6bdf29476c59b81d6d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json b/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json deleted file mode 100644 index 944aab75b5b5481feb82ec9d1f083623e1a3b793..0000000000000000000000000000000000000000 --- a/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 
18:44:30.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3677474402730375, - "acc_stderr": 0.01409099561816849, - "acc_norm": 0.41552901023890787, - "acc_norm_stderr": 0.01440136664121639 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40579565823541125, - "acc_stderr": 0.004900417982582061, - "acc_norm": 0.5321649073889664, - "acc_norm_stderr": 0.004979446038824757 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4678362573099415, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.4678362573099415, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5825242718446602, - "acc_stderr": 0.04882840548212238, - "acc_norm": 0.5825242718446602, - "acc_norm_stderr": 0.04882840548212238 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5006385696040868, - "acc_stderr": 0.01787994891443168, - "acc_norm": 0.5006385696040868, - "acc_norm_stderr": 0.01787994891443168 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.43703703703703706, - "acc_stderr": 0.04284958639753399, - "acc_norm": 0.43703703703703706, - "acc_norm_stderr": 0.04284958639753399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847415, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847415 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.031158522131357787, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.031158522131357787 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3795180722891566, - "acc_stderr": 0.037777988227480165, - "acc_norm": 0.3795180722891566, - "acc_norm_stderr": 0.037777988227480165 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5080385852090032, - "acc_stderr": 0.02839442137098453, - "acc_norm": 0.5080385852090032, - "acc_norm_stderr": 0.02839442137098453 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.47085201793721976, - "acc_stderr": 0.03350073248773404, - "acc_norm": 0.47085201793721976, - "acc_norm_stderr": 0.03350073248773404 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4351145038167939, - "acc_stderr": 0.04348208051644858, - "acc_norm": 0.4351145038167939, - "acc_norm_stderr": 0.04348208051644858 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.03547601494006938, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.03547601494006938 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4789915966386555, - "acc_stderr": 0.03244980849990028, - "acc_norm": 0.4789915966386555, - "acc_norm_stderr": 0.03244980849990028 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4153846153846154, - "acc_stderr": 0.024985354923102318, - "acc_norm": 0.4153846153846154, - "acc_norm_stderr": 0.024985354923102318 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - 
"harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907062, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907062 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998576, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998576 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4483870967741935, - "acc_stderr": 0.02829205683011273, - "acc_norm": 0.4483870967741935, - "acc_norm_stderr": 0.02829205683011273 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6538461538461539, - "acc_stderr": 0.0311669573672359, - "acc_norm": 0.6538461538461539, - "acc_norm_stderr": 0.0311669573672359 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4339622641509434, - "acc_stderr": 0.030503292013342592, - "acc_norm": 0.4339622641509434, - "acc_norm_stderr": 0.030503292013342592 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.028317533496066482, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.028317533496066482 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.037579499229433426, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.037579499229433426 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5572139303482587, - "acc_stderr": 0.03512310964123935, - "acc_norm": 0.5572139303482587, - "acc_norm_stderr": 0.03512310964123935 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.03742461193887248, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.03742461193887248 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.026756255129663765, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.026756255129663765 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.027744313443376536, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.027744313443376536 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 
0.47150259067357514, - "acc_stderr": 0.036025735712884414, - "acc_norm": 0.47150259067357514, - "acc_norm_stderr": 0.036025735712884414 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.20175438596491227, - "acc_stderr": 0.03775205013583639, - "acc_norm": 0.20175438596491227, - "acc_norm_stderr": 0.03775205013583639 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5174311926605505, - "acc_stderr": 0.02142429187185315, - "acc_norm": 0.5174311926605505, - "acc_norm_stderr": 0.02142429187185315 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.4126984126984127, - "acc_stderr": 0.04403438954768177, - "acc_norm": 0.4126984126984127, - "acc_norm_stderr": 0.04403438954768177 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4150326797385621, - "acc_stderr": 0.028213504177824093, - "acc_norm": 0.4150326797385621, - "acc_norm_stderr": 0.028213504177824093 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.45, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.45, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5867768595041323, - "acc_stderr": 0.04495087843548408, - "acc_norm": 0.5867768595041323, - "acc_norm_stderr": 0.04495087843548408 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.45394736842105265, - "acc_stderr": 0.04051646342874142, - "acc_norm": 0.45394736842105265, - "acc_norm_stderr": 0.04051646342874142 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.019431775677037313, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.019431775677037313 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.02812163604063989, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.02812163604063989 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.03324708911809117, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.03324708911809117 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24916201117318434, - "acc_stderr": 0.01446589382985992, - "acc_norm": 0.24916201117318434, - "acc_norm_stderr": 0.01446589382985992 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34191176470588236, - "acc_stderr": 0.02881472242225417, - "acc_norm": 0.34191176470588236, - "acc_norm_stderr": 0.02881472242225417 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3624511082138201, - "acc_stderr": 0.012277512533252495, - "acc_norm": 0.3624511082138201, - "acc_norm_stderr": 0.012277512533252495 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - 
"acc_stderr": 0.03492406104163614, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163614 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502342, - "mc2": 0.4364091486561351, - "mc2_stderr": 0.015369734802451228 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5176056338028169, - "acc_stderr": 0.017129150724246808, - "acc_norm": 0.5903755868544601, - "acc_norm_stderr": 0.016857467505356098 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HumanF-MarkrAI/pub-llama-13b-v1", - "model_sha": "4aa21e41dfcb82ff842306b3b5eadd2b258bfc80", - "model_dtype": "torch.float16", - "lighteval_sha": "", - 
"num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json b/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json deleted file mode 100644 index ba3d02b364432de78c70ce676e180e6b4132c978..0000000000000000000000000000000000000000 --- a/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.371160409556314, - "acc_stderr": 0.014117971901142824, - "acc_norm": 0.4197952218430034, - "acc_norm_stderr": 0.014422181226303026 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4048994224258116, - "acc_stderr": 0.004898693652043317, - "acc_norm": 0.5401314479187412, - "acc_norm_stderr": 0.0049736830262021746 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47953216374269003, - "acc_stderr": 0.0383161053282193, - "acc_norm": 0.47953216374269003, - "acc_norm_stderr": 0.0383161053282193 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5631067961165048, - "acc_stderr": 0.049111471073657764, - "acc_norm": 0.5631067961165048, - "acc_norm_stderr": 0.049111471073657764 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5019157088122606, - "acc_stderr": 0.017879832259026677, - "acc_norm": 0.5019157088122606, - "acc_norm_stderr": 0.017879832259026677 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.042925967182569816, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.042925967182569816 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843231008, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843231008 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 0.0371172519074075, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.0371172519074075 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5048231511254019, - "acc_stderr": 0.028396770444111298, - "acc_norm": 0.5048231511254019, - "acc_norm_stderr": 0.028396770444111298 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.48878923766816146, - "acc_stderr": 0.033549366530984746, - "acc_norm": 0.48878923766816146, - "acc_norm_stderr": 0.033549366530984746 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5353535353535354, - "acc_stderr": 0.03553436368828063, - "acc_norm": 0.5353535353535354, - "acc_norm_stderr": 0.03553436368828063 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364395, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364395 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4495798319327731, - "acc_stderr": 
0.03231293497137707, - "acc_norm": 0.4495798319327731, - "acc_norm_stderr": 0.03231293497137707 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4256410256410256, - "acc_stderr": 0.02506909438729654, - "acc_norm": 0.4256410256410256, - "acc_norm_stderr": 0.02506909438729654 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536821, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536821 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.03413963805906234, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.03413963805906234 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.02820622559150274, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.02820622559150274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6452991452991453, - "acc_stderr": 0.03134250486245402, - "acc_norm": 0.6452991452991453, - "acc_norm_stderr": 0.03134250486245402 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4339622641509434, - "acc_stderr": 0.030503292013342592, - "acc_norm": 0.4339622641509434, - "acc_norm_stderr": 0.030503292013342592 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.02840653309060846, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.02840653309060846 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5522388059701493, - "acc_stderr": 0.035161847729521675, - "acc_norm": 0.5522388059701493, - "acc_norm_stderr": 0.035161847729521675 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.0372424959581773, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.0372424959581773 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36772486772486773, - "acc_stderr": 0.024833839825562424, - "acc_norm": 0.36772486772486773, - "acc_norm_stderr": 0.024833839825562424 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.040329990539607195, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.040329990539607195 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.65, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.65, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4479768786127168, - "acc_stderr": 0.02677299065336182, - "acc_norm": 0.4479768786127168, - "acc_norm_stderr": 0.02677299065336182 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44785276073619634, - "acc_stderr": 0.03906947479456601, - "acc_norm": 0.44785276073619634, - 
"acc_norm_stderr": 0.03906947479456601 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.47530864197530864, - "acc_stderr": 0.02778680093142745, - "acc_norm": 0.47530864197530864, - "acc_norm_stderr": 0.02778680093142745 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48186528497409326, - "acc_stderr": 0.036060650018329185, - "acc_norm": 0.48186528497409326, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5376146788990825, - "acc_stderr": 0.021376575274397576, - "acc_norm": 0.5376146788990825, - "acc_norm_stderr": 0.021376575274397576 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.40476190476190477, - "acc_stderr": 0.043902592653775614, - "acc_norm": 0.40476190476190477, - "acc_norm_stderr": 0.043902592653775614 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.028275490156791434, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.028275490156791434 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4473684210526316, - "acc_stderr": 0.04046336883978251, - "acc_norm": 0.4473684210526316, - "acc_norm_stderr": 0.04046336883978251 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.019506291693954854, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.019506291693954854 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.34397163120567376, - "acc_stderr": 0.02833801742861132, - "acc_norm": 0.34397163120567376, - "acc_norm_stderr": 0.02833801742861132 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.03324708911809117, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.03324708911809117 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34558823529411764, - "acc_stderr": 0.02888819310398865, - "acc_norm": 0.34558823529411764, - "acc_norm_stderr": 0.02888819310398865 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46530612244897956, - "acc_stderr": 0.03193207024425314, - "acc_norm": 0.46530612244897956, - 
"acc_norm_stderr": 0.03193207024425314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.569620253164557, - "acc_stderr": 0.03223017195937599, - "acc_norm": 0.569620253164557, - "acc_norm_stderr": 0.03223017195937599 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.35723598435462844, - "acc_stderr": 0.012238615750316506, - "acc_norm": 0.35723598435462844, - "acc_norm_stderr": 0.012238615750316506 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.03492406104163614, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163614 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2802937576499388, - "mc1_stderr": 0.015723139524608742, - "mc2": 0.43609767583849846, - "mc2_stderr": 0.015308496603243212 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5751173708920188, - "acc_stderr": 0.0169452488268217, - "acc_norm": 0.647887323943662, - "acc_norm_stderr": 0.016372906865326657 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 
1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HumanF-MarkrAI/pub-llama-13b-v2", - "model_sha": "d59387039c395781b62f514db7bf4fb32d254522", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-7b-v1/result_2023-10-19 00:06:32.json b/HumanF-MarkrAI/pub-llama-7b-v1/result_2023-10-19 00:06:32.json deleted file mode 100644 index 3df8f1c01f830310e2d96f94fea69505e41f266e..0000000000000000000000000000000000000000 --- a/HumanF-MarkrAI/pub-llama-7b-v1/result_2023-10-19 00:06:32.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28071672354948807, - "acc_stderr": 0.013131238126975593, - "acc_norm": 0.34812286689419797, - "acc_norm_stderr": 0.013921008595179338 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36496713802031466, - "acc_stderr": 0.0048043705638562305, - "acc_norm": 0.48665604461262696, - "acc_norm_stderr": 0.004988004122536492 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.39766081871345027, - "acc_stderr": 0.0375363895576169, - "acc_norm": 0.39766081871345027, - "acc_norm_stderr": 0.0375363895576169 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161551, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161551 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3716475095785441, - "acc_stderr": 0.01728080252213318, - "acc_norm": 0.3716475095785441, - "acc_norm_stderr": 0.01728080252213318 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424004, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424004 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412482, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412482 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.03629335329947861, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.03629335329947861 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3890675241157556, - "acc_stderr": 0.027690337536485372, - "acc_norm": 0.3890675241157556, - "acc_norm_stderr": 0.027690337536485372 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.032521134899291884, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.032521134899291884 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852731, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 
0.03488901616852731 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419035, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419035 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28717948717948716, - "acc_stderr": 0.022939925418530616, - "acc_norm": 0.28717948717948716, - "acc_norm_stderr": 0.022939925418530616 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403326, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403326 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.0478034362693679, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.0478034362693679 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733555, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733555 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3774193548387097, - "acc_stderr": 0.027575960723278253, - "acc_norm": 0.3774193548387097, - "acc_norm_stderr": 0.027575960723278253 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49145299145299143, - "acc_stderr": 0.032751303000970296, - "acc_norm": 0.49145299145299143, - "acc_norm_stderr": 0.032751303000970296 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.02564410863926762, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.02564410863926762 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943342, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943342 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4527363184079602, - "acc_stderr": 0.035197027175769155, - "acc_norm": 0.4527363184079602, - "acc_norm_stderr": 0.035197027175769155 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.03496101481191181, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.03496101481191181 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237103, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237103 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.025992472029306386, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.025992472029306386 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924055, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924055 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38580246913580246, - "acc_stderr": 0.027085401226132143, - "acc_norm": 0.38580246913580246, - "acc_norm_stderr": 0.027085401226132143 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695234, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695234 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.0354150857888402, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.0354150857888402 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3926605504587156, - "acc_stderr": 0.020937505161201093, - "acc_norm": 0.3926605504587156, - "acc_norm_stderr": 0.020937505161201093 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235173, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235173 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.028358956313423556, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.028358956313423556 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.01892608291608339, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.01892608291608339 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.03191923445686186, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.03191923445686186 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25251396648044694, - "acc_stderr": 0.014530330201468636, - "acc_norm": 0.25251396648044694, - "acc_norm_stderr": 0.014530330201468636 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - 
"harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41911764705882354, - "acc_stderr": 0.02997280717046463, - "acc_norm": 0.41911764705882354, - "acc_norm_stderr": 0.02997280717046463 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3510204081632653, - "acc_stderr": 0.03055531675557364, - "acc_norm": 0.3510204081632653, - "acc_norm_stderr": 0.03055531675557364 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4810126582278481, - "acc_stderr": 0.03252375148090448, - "acc_norm": 0.4810126582278481, - "acc_norm_stderr": 0.03252375148090448 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31486310299869624, - "acc_stderr": 0.011862561755715928, - "acc_norm": 0.31486310299869624, - "acc_norm_stderr": 0.011862561755715928 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3872549019607843, - "acc_stderr": 0.03418931233833344, - "acc_norm": 0.3872549019607843, - "acc_norm_stderr": 0.03418931233833344 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.37575757575757573, - "acc_stderr": 0.03781887353205982, - "acc_norm": 0.37575757575757573, - "acc_norm_stderr": 0.03781887353205982 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752332, - "mc2": 0.38399188144082486, - "mc2_stderr": 0.015164475722750202 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2981220657276995, - "acc_stderr": 0.01568061440819548, - "acc_norm": 0.3720657276995305, - "acc_norm_stderr": 0.016569223163823546 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - 
"harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "HumanF-MarkrAI/pub-llama-7b-v1", - "model_sha": "41de7ce06931ccfe1ed99435bb071d69aca2ffe0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json b/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json deleted file mode 100644 index 181add6754ffdbf05eb9add2774c6631af8ded1e..0000000000000000000000000000000000000000 --- a/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30204778156996587, - "acc_stderr": 0.013417519144716417, - "acc_norm": 0.3174061433447099, - "acc_norm_stderr": 0.01360223908803817 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3450507866958773, - "acc_stderr": 0.004744132825391515, - "acc_norm": 0.41196972714598684, - "acc_norm_stderr": 0.00491183773058221 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.04750458399041692, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.04750458399041692 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39719029374201786, - "acc_stderr": 0.017497905037159377, - "acc_norm": 0.39719029374201786, - "acc_norm_stderr": 0.017497905037159377 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.040247784019771096, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.040247784019771096 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.030976692998534422, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.030976692998534422 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511114, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 0.03529486801511114 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.37942122186495175, - "acc_stderr": 0.027559949802347817, - "acc_norm": 0.37942122186495175, - "acc_norm_stderr": 0.027559949802347817 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3901345291479821, - "acc_stderr": 0.03273766725459156, - "acc_norm": 0.3901345291479821, - "acc_norm_stderr": 
0.03273766725459156 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.32061068702290074, - "acc_stderr": 0.040933292298342784, - "acc_norm": 0.32061068702290074, - "acc_norm_stderr": 0.040933292298342784 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3282828282828283, - "acc_stderr": 0.03345678422756777, - "acc_norm": 0.3282828282828283, - "acc_norm_stderr": 0.03345678422756777 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419034, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419034 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179964, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179964 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566545, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566545 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3384615384615385, - "acc_stderr": 0.023991500500313036, - "acc_norm": 0.3384615384615385, - "acc_norm_stderr": 0.023991500500313036 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.04616631111801713, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.04616631111801713 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3419354838709677, - "acc_stderr": 0.026985289576552732, - "acc_norm": 0.3419354838709677, - "acc_norm_stderr": 0.026985289576552732 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5341880341880342, - "acc_stderr": 0.03267942734081228, - "acc_norm": 0.5341880341880342, - "acc_norm_stderr": 0.03267942734081228 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33962264150943394, - "acc_stderr": 0.029146904747798342, - "acc_norm": 0.33962264150943394, - "acc_norm_stderr": 0.029146904747798342 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3296296296296296, - "acc_stderr": 0.02866120111652457, - "acc_norm": 0.3296296296296296, - "acc_norm_stderr": 0.02866120111652457 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4129353233830846, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.4129353233830846, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.03476599607516478, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.03476599607516478 - }, - 
"harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463087, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463087 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.037161774375660164, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.037161774375660164 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.026296227915613663, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.026296227915613663 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3487654320987654, - "acc_stderr": 0.02651759772446501, - "acc_norm": 0.3487654320987654, - "acc_norm_stderr": 0.02651759772446501 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40932642487046633, - "acc_stderr": 0.03548608168860806, - "acc_norm": 0.40932642487046633, - "acc_norm_stderr": 0.03548608168860806 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.037124548537213684, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.037124548537213684 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3522935779816514, - "acc_stderr": 0.020480568843999004, - "acc_norm": 0.3522935779816514, - "acc_norm_stderr": 0.020480568843999004 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.0404061017820884, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.0404061017820884 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.027956046165424516, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.027956046165424516 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5206611570247934, - "acc_stderr": 0.045604560863872365, - "acc_norm": 0.5206611570247934, - "acc_norm_stderr": 0.045604560863872365 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3881578947368421, - "acc_stderr": 0.03965842097512744, - "acc_norm": 0.3881578947368421, - "acc_norm_stderr": 0.03965842097512744 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32189542483660133, - "acc_stderr": 0.01890101532209309, - "acc_norm": 0.32189542483660133, - "acc_norm_stderr": 0.01890101532209309 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101373, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101373 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952688, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952688 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.032468872436376486, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.032468872436376486 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.01424263007057489, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.01424263007057489 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.26838235294117646, - "acc_stderr": 0.026917481224377246, - "acc_norm": 0.26838235294117646, - "acc_norm_stderr": 0.026917481224377246 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.31020408163265306, - "acc_stderr": 0.029613459872484375, - "acc_norm": 0.31020408163265306, - "acc_norm_stderr": 0.029613459872484375 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5274261603375527, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.5274261603375527, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.303129074315515, - "acc_stderr": 0.0117386699512543, - "acc_norm": 0.303129074315515, - "acc_norm_stderr": 0.0117386699512543 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3878787878787879, - "acc_stderr": 0.038049136539710114, - "acc_norm": 0.3878787878787879, - "acc_norm_stderr": 0.038049136539710114 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006509, - "mc2": 0.4249328187172098, - "mc2_stderr": 0.016337088601279814 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4014084507042254, - "acc_stderr": 0.016803268469738605, - "acc_norm": 0.46830985915492956, - "acc_norm_stderr": 0.01710531885082843 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - 
"harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/Foundation_Platypus_data", - "model_sha": "63fbecee8df6cc694880299e37b7cd8f8140942e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json b/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json deleted file mode 100644 index 3528157a5648767136742b22761f6df6d5b0c887..0000000000000000000000000000000000000000 --- a/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27047781569965873, - "acc_stderr": 0.012980954547659556, - "acc_norm": 0.3319112627986348, - "acc_norm_stderr": 0.013760988200880541 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3505277833100976, - "acc_stderr": 0.004761601303258889, - "acc_norm": 0.44722166899024096, - "acc_norm_stderr": 0.0049619049491713965 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.32748538011695905, - "acc_stderr": 0.03599335771456027, - "acc_norm": 0.32748538011695905, - "acc_norm_stderr": 0.03599335771456027 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161549, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161549 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.351213282247765, - "acc_stderr": 0.01706998205149943, - "acc_norm": 0.351213282247765, - "acc_norm_stderr": 0.01706998205149943 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.040247784019771096, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.040247784019771096 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3659574468085106, - "acc_stderr": 0.031489558297455304, - "acc_norm": 0.3659574468085106, - "acc_norm_stderr": 
0.031489558297455304 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 0.0371172519074075, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.0371172519074075 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.31511254019292606, - "acc_stderr": 0.026385273703464496, - "acc_norm": 0.31511254019292606, - "acc_norm_stderr": 0.026385273703464496 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4125560538116592, - "acc_stderr": 0.03304062175449297, - "acc_norm": 0.4125560538116592, - "acc_norm_stderr": 0.03304062175449297 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.040103589424622034, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.040103589424622034 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03358618145732524, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03358618145732524 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03960933549451207, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03960933549451207 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.27310924369747897, - "acc_stderr": 0.028942004040998167, - "acc_norm": 0.27310924369747897, - "acc_norm_stderr": 0.028942004040998167 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2717948717948718, - "acc_stderr": 0.02255655101013235, - "acc_norm": 0.2717948717948718, - "acc_norm_stderr": 0.02255655101013235 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733552, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733552 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042767, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042767 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4658119658119658, - "acc_stderr": 0.03267942734081228, - "acc_norm": 0.4658119658119658, - "acc_norm_stderr": 0.03267942734081228 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3320754716981132, - "acc_stderr": 0.028985455652334395, - "acc_norm": 0.3320754716981132, - "acc_norm_stderr": 0.028985455652334395 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.04673752333670237, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.04673752333670237 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.025644108639267645, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.025644108639267645 - 
}, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03333333333333336, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03333333333333336 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.033687629322594295, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.033687629322594295 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918428, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918428 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.037455547914624576, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.037455547914624576 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.28901734104046245, - "acc_stderr": 0.02440517393578323, - "acc_norm": 0.28901734104046245, - "acc_norm_stderr": 0.02440517393578323 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2822085889570552, - "acc_stderr": 0.03536117886664743, - "acc_norm": 0.2822085889570552, - "acc_norm_stderr": 0.03536117886664743 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.31790123456790126, - "acc_stderr": 0.025910063528240865, - "acc_norm": 0.31790123456790126, - "acc_norm_stderr": 0.025910063528240865 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2694300518134715, - "acc_stderr": 0.032018671228777947, - "acc_norm": 0.2694300518134715, - "acc_norm_stderr": 0.032018671228777947 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3155963302752294, - "acc_stderr": 0.019926117513869666, - "acc_norm": 0.3155963302752294, - "acc_norm_stderr": 0.019926117513869666 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.0361960452412425, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.0361960452412425 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.026256053835718964, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.026256053835718964 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.035834961763610625, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.035834961763610625 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.2565359477124183, - "acc_stderr": 0.01766784161237899, - "acc_norm": 0.2565359477124183, - "acc_norm_stderr": 0.01766784161237899 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.025518731049537773, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.025518731049537773 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.044939490686135404, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.044939490686135404 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.028765111718046972, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.028765111718046972 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.22793296089385476, - "acc_stderr": 0.014030149950805095, - "acc_norm": 0.22793296089385476, - "acc_norm_stderr": 0.014030149950805095 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3492647058823529, - "acc_stderr": 0.02895975519682486, - "acc_norm": 0.3492647058823529, - "acc_norm_stderr": 0.02895975519682486 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.19183673469387755, - "acc_stderr": 0.025206963154225395, - "acc_norm": 0.19183673469387755, - "acc_norm_stderr": 0.025206963154225395 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3037974683544304, - "acc_stderr": 0.0299366963871386, - "acc_norm": 0.3037974683544304, - "acc_norm_stderr": 0.0299366963871386 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2666232073011734, - "acc_stderr": 0.01129383603161213, - "acc_norm": 0.2666232073011734, - "acc_norm_stderr": 0.01129383603161213 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.03283472056108567, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.03283472056108567 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4297360873033464, - "mc2_stderr": 0.016304548005749996 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.21713615023474178, - "acc_stderr": 0.014133326970413466, - "acc_norm": 0.23943661971830985, - "acc_norm_stderr": 0.014628446638821324 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 
1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/KoT-Platypus2_foundation", - "model_sha": "7e97a65b825f9aa4691fe2bebf14696d80ba831d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json b/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json deleted file mode 100644 index 5cbb849dd92c754658f3928b26b507e97dd8ab51..0000000000000000000000000000000000000000 --- a/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.257679180887372, - "acc_stderr": 0.0127807705627684, - "acc_norm": 0.3003412969283277, - "acc_norm_stderr": 0.01339590930995701 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3263294164509062, - "acc_stderr": 0.004679111783653908, - "acc_norm": 0.385381398127863, - "acc_norm_stderr": 0.00485690647371939 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38596491228070173, - "acc_stderr": 0.03733756969066164, - "acc_norm": 0.38596491228070173, - "acc_norm_stderr": 0.03733756969066164 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3959131545338442, - "acc_stderr": 0.01748824700697927, - "acc_norm": 0.3959131545338442, - "acc_norm_stderr": 
0.01748824700697927 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.31063829787234043, - "acc_stderr": 0.030251237579213167, - "acc_norm": 0.31063829787234043, - "acc_norm_stderr": 0.030251237579213167 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 0.0371172519074075, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.0371172519074075 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3729903536977492, - "acc_stderr": 0.0274666102131401, - "acc_norm": 0.3729903536977492, - "acc_norm_stderr": 0.0274666102131401 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.032596251184168264, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.032596251184168264 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.35877862595419846, - "acc_stderr": 0.04206739313864908, - "acc_norm": 0.35877862595419846, - "acc_norm_stderr": 0.04206739313864908 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.0347327959083696, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.0347327959083696 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.04093793981266237, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.04093793981266237 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3319327731092437, - "acc_stderr": 0.030588697013783667, - "acc_norm": 0.3319327731092437, - "acc_norm_stderr": 0.030588697013783667 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3282051282051282, - "acc_stderr": 0.023807633198657262, - "acc_norm": 0.3282051282051282, - "acc_norm_stderr": 0.023807633198657262 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237101, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237101 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3251231527093596, - "acc_stderr": 0.032957975663112704, - "acc_norm": 0.3251231527093596, - "acc_norm_stderr": 0.032957975663112704 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3387096774193548, - "acc_stderr": 0.02692344605930284, - "acc_norm": 0.3387096774193548, - "acc_norm_stderr": 0.02692344605930284 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5470085470085471, - "acc_stderr": 0.0326109987309862, - "acc_norm": 0.5470085470085471, - "acc_norm_stderr": 0.0326109987309862 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 
0.35471698113207545, - "acc_stderr": 0.029445175328199593, - "acc_norm": 0.35471698113207545, - "acc_norm_stderr": 0.029445175328199593 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2851851851851852, - "acc_stderr": 0.027528599210340496, - "acc_norm": 0.2851851851851852, - "acc_norm_stderr": 0.027528599210340496 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804724, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.03511807571804724 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.44776119402985076, - "acc_stderr": 0.03516184772952166, - "acc_norm": 0.44776119402985076, - "acc_norm_stderr": 0.03516184772952166 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267437, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267437 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194978, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194978 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.025992472029306386, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.025992472029306386 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3803680981595092, - "acc_stderr": 0.038142698932618374, - "acc_norm": 0.3803680981595092, - "acc_norm_stderr": 0.038142698932618374 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3395061728395062, - "acc_stderr": 0.026348564412011624, - "acc_norm": 0.3395061728395062, - "acc_norm_stderr": 0.026348564412011624 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.37823834196891193, - "acc_stderr": 0.034998072761933396, - "acc_norm": 0.37823834196891193, - "acc_norm_stderr": 0.034998072761933396 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3339449541284404, - "acc_stderr": 0.020220554196736403, - "acc_norm": 0.3339449541284404, - "acc_norm_stderr": 0.020220554196736403 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392871, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392871 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.02742047766262925, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.02742047766262925 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 
0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.512396694214876, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.512396694214876, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351587, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351587 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.018403415710109797, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.018403415710109797 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307857, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307857 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.031674687068289784, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.031674687068289784 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225608, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.26838235294117646, - "acc_stderr": 0.026917481224377243, - "acc_norm": 0.26838235294117646, - "acc_norm_stderr": 0.026917481224377243 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3469387755102041, - "acc_stderr": 0.030472526026726492, - "acc_norm": 0.3469387755102041, - "acc_norm_stderr": 0.030472526026726492 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3670886075949367, - "acc_stderr": 0.03137624072561618, - "acc_norm": 0.3670886075949367, - "acc_norm_stderr": 0.03137624072561618 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28552803129074317, - "acc_stderr": 0.011535751586665673, - "acc_norm": 0.28552803129074317, - "acc_norm_stderr": 0.011535751586665673 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.033644872860882996, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.033644872860882996 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.42424242424242425, - "acc_stderr": 0.038592681420702615, - "acc_norm": 0.42424242424242425, - "acc_norm_stderr": 0.038592681420702615 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476196, - "mc2": 0.41968593595047643, - "mc2_stderr": 0.016254999867947123 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31220657276995306, - "acc_stderr": 0.015884928030374883, - "acc_norm": 0.3403755868544601, - "acc_norm_stderr": 0.016242870504270406 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - 
"harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA", - "model_sha": "cbb72323bf2db6eb9ea591a4a882d02964d53eed", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json b/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json deleted file mode 100644 index fc28eceff87b048d0eebe8273452885fe7ea5f1e..0000000000000000000000000000000000000000 --- a/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29692832764505117, - "acc_stderr": 0.013352025976725222, - "acc_norm": 0.34812286689419797, - "acc_norm_stderr": 0.013921008595179342 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35311690898227444, - "acc_stderr": 
0.004769618829196517, - "acc_norm": 0.42939653455486954, - "acc_norm_stderr": 0.0049397843114489855 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4152046783625731, - "acc_stderr": 0.03779275945503201, - "acc_norm": 0.4152046783625731, - "acc_norm_stderr": 0.03779275945503201 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.42718446601941745, - "acc_stderr": 0.04897957737781169, - "acc_norm": 0.42718446601941745, - "acc_norm_stderr": 0.04897957737781169 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41507024265644954, - "acc_stderr": 0.017620137003655265, - "acc_norm": 0.41507024265644954, - "acc_norm_stderr": 0.017620137003655265 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.03057944277361034, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.03057944277361034 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4405144694533762, - "acc_stderr": 0.028196400574197422, - "acc_norm": 0.4405144694533762, - "acc_norm_stderr": 0.028196400574197422 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.032277904428505, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.032277904428505 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4122137404580153, - "acc_stderr": 0.04317171194870254, - "acc_norm": 0.4122137404580153, - "acc_norm_stderr": 0.04317171194870254 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.43434343434343436, - "acc_stderr": 0.03531505879359183, - "acc_norm": 0.43434343434343436, - "acc_norm_stderr": 0.03531505879359183 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.36551724137931035, - "acc_stderr": 0.04013124195424386, - "acc_norm": 0.36551724137931035, - "acc_norm_stderr": 0.04013124195424386 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.34615384615384615, - "acc_stderr": 0.024121125416941173, - "acc_norm": 0.34615384615384615, - "acc_norm_stderr": 0.024121125416941173 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536823 - }, - 
"harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3251231527093596, - "acc_stderr": 0.032957975663112704, - "acc_norm": 0.3251231527093596, - "acc_norm_stderr": 0.032957975663112704 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3774193548387097, - "acc_stderr": 0.02757596072327824, - "acc_norm": 0.3774193548387097, - "acc_norm_stderr": 0.02757596072327824 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5726495726495726, - "acc_stderr": 0.03240847393516326, - "acc_norm": 0.5726495726495726, - "acc_norm_stderr": 0.03240847393516326 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.37358490566037733, - "acc_stderr": 0.02977308271331988, - "acc_norm": 0.37358490566037733, - "acc_norm_stderr": 0.02977308271331988 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4636363636363636, - "acc_stderr": 0.04776449162396197, - "acc_norm": 0.4636363636363636, - "acc_norm_stderr": 0.04776449162396197 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.02763490726417854, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.02763490726417854 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4925373134328358, - "acc_stderr": 0.03535140084276719, - "acc_norm": 0.4925373134328358, - "acc_norm_stderr": 0.03535140084276719 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736412, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736412 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708617, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708617 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.03962135573486219, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.03962135573486219 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4190751445086705, - "acc_stderr": 0.026564178111422622, - "acc_norm": 0.4190751445086705, - "acc_norm_stderr": 0.026564178111422622 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.03856672163548914, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.03856672163548914 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.41358024691358025, - "acc_stderr": 0.027402042040269955, - "acc_norm": 0.41358024691358025, - "acc_norm_stderr": 0.027402042040269955 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144808, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144808 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512321984, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512321984 - }, - 
"harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3834862385321101, - "acc_stderr": 0.020847156641915984, - "acc_norm": 0.3834862385321101, - "acc_norm_stderr": 0.020847156641915984 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.027780141207023337, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.027780141207023337 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.04507732278775089, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.04507732278775089 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.038035102483515854, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.038035102483515854 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3284313725490196, - "acc_stderr": 0.01899970738316267, - "acc_norm": 0.3284313725490196, - "acc_norm_stderr": 0.01899970738316267 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101362, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101362 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.03246887243637648, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.03246887243637648 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.264804469273743, - "acc_stderr": 0.014756906483260657, - "acc_norm": 0.264804469273743, - "acc_norm_stderr": 0.014756906483260657 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33455882352941174, - "acc_stderr": 0.028661996202335314, - "acc_norm": 0.33455882352941174, - "acc_norm_stderr": 0.028661996202335314 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.43673469387755104, - "acc_stderr": 0.031751952375833226, - "acc_norm": 0.43673469387755104, - "acc_norm_stderr": 0.031751952375833226 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4219409282700422, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.4219409282700422, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2920469361147327, - "acc_stderr": 0.011613349136271817, - "acc_norm": 0.2920469361147327, - "acc_norm_stderr": 0.011613349136271817 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4068627450980392, - "acc_stderr": 0.03447891136353383, - "acc_norm": 0.4068627450980392, - "acc_norm_stderr": 0.03447891136353383 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4303030303030303, - "acc_stderr": 0.03866225962879077, - "acc_norm": 0.4303030303030303, - "acc_norm_stderr": 
0.03866225962879077 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3023255813953488, - "mc1_stderr": 0.01607750926613303, - "mc2": 0.4750714543386988, - "mc2_stderr": 0.016159472828434183 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.27582159624413144, - "acc_stderr": 0.01532047174956522, - "acc_norm": 0.29107981220657275, - "acc_norm_stderr": 0.015571840078994575 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus", - "model_sha": "1c97acb58f2a740d7994d1ea7b0c02c234bbde3a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json 
b/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json deleted file mode 100644 index a9f8b7f5fec2c9edd996e0d8284ea6a36281c888..0000000000000000000000000000000000000000 --- a/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.197098976109215, - "acc_stderr": 0.011625047669880612, - "acc_norm": 0.26535836177474403, - "acc_norm_stderr": 0.012902554762313964 - }, - "harness|ko_hellaswag|10": { - "acc": 0.29904401513642703, - "acc_stderr": 0.0045690346133326004, - "acc_norm": 0.36675960963951404, - "acc_norm_stderr": 0.0048093520750089385 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.20388349514563106, - "acc_stderr": 0.0398913985953177, - "acc_norm": 0.20388349514563106, - "acc_norm_stderr": 0.0398913985953177 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2796934865900383, - "acc_stderr": 0.016050792148036536, - "acc_norm": 0.2796934865900383, - "acc_norm_stderr": 0.016050792148036536 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.027678452578212387, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.027678452578212387 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21084337349397592, - "acc_stderr": 0.031755547866299194, - "acc_norm": 0.21084337349397592, - "acc_norm_stderr": 0.031755547866299194 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2765273311897106, - "acc_stderr": 0.02540383297817961, - "acc_norm": 0.2765273311897106, - "acc_norm_stderr": 0.02540383297817961 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508762, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508762 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.03768335959728742, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.03768335959728742 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2676767676767677, - "acc_stderr": 0.03154449888270285, - "acc_norm": 0.2676767676767677, - "acc_norm_stderr": 0.03154449888270285 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.027553614467863786, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.027553614467863786 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2641025641025641, - "acc_stderr": 0.022352193737453285, 
- "acc_norm": 0.2641025641025641, - "acc_norm_stderr": 0.022352193737453285 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.029678333141444444, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.029678333141444444 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3096774193548387, - "acc_stderr": 0.026302774983517414, - "acc_norm": 0.3096774193548387, - "acc_norm_stderr": 0.026302774983517414 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.030882736974138663, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.030882736974138663 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2641509433962264, - "acc_stderr": 0.0271342916287417, - "acc_norm": 0.2641509433962264, - "acc_norm_stderr": 0.0271342916287417 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.038950910157241364, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.038950910157241364 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.03734535676787198, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.03734535676787198 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.29850746268656714, - "acc_stderr": 0.032357437893550424, - "acc_norm": 0.29850746268656714, - "acc_norm_stderr": 0.032357437893550424 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071128, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071128 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.37423312883435583, - "acc_stderr": 0.03802068102899616, - "acc_norm": 0.37423312883435583, - "acc_norm_stderr": 0.03802068102899616 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.024569223600460845, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 
0.024569223600460845 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565318, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565318 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813344, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813344 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.28807339449541286, - "acc_stderr": 0.019416445892636015, - "acc_norm": 0.28807339449541286, - "acc_norm_stderr": 0.019416445892636015 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.0361960452412425, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.0361960452412425 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.025646863097137908, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.025646863097137908 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2236842105263158, - "acc_stderr": 0.03391160934343602, - "acc_norm": 0.2236842105263158, - "acc_norm_stderr": 0.03391160934343602 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.01784808957491323, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.01784808957491323 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180848, - "acc_norm": 0.25886524822695034, - "acc_norm_stderr": 0.026129572527180848 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.030225226160012386, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.030225226160012386 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225605, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225605 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35661764705882354, - "acc_stderr": 0.029097209568411962, - "acc_norm": 0.35661764705882354, - "acc_norm_stderr": 0.029097209568411962 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24489795918367346, - "acc_stderr": 0.027529637440174913, - "acc_norm": 0.24489795918367346, - "acc_norm_stderr": 0.027529637440174913 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3291139240506329, - "acc_stderr": 0.03058732629470236, - "acc_norm": 0.3291139240506329, - 
"acc_norm_stderr": 0.03058732629470236 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2522816166883963, - "acc_stderr": 0.011092789056875248, - "acc_norm": 0.2522816166883963, - "acc_norm_stderr": 0.011092789056875248 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.031321798030832904, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.031321798030832904 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4, - "acc_stderr": 0.03825460278380026, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03825460278380026 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.015528566637087288, - "mc2": 0.4755864114164748, - "mc2_stderr": 0.016657423214439165 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.12910798122065728, - "acc_stderr": 0.011494601522741298, - "acc_norm": 0.29460093896713613, - "acc_norm_stderr": 0.015626788056631535 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - 
"harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/Platypus7B_Follow_FT", - "model_sha": "ac5c77ab817d2d9b0a4f3fc7c609dce3770428d8", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json b/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json deleted file mode 100644 index 2284315c3c78c6111fcb6ad58e90b439e84c7041..0000000000000000000000000000000000000000 --- a/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27986348122866894, - "acc_stderr": 0.013119040897725923, - "acc_norm": 0.3506825938566553, - "acc_norm_stderr": 0.013944635930726089 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3648675562636925, - "acc_stderr": 0.004804091708812553, - "acc_norm": 0.4856602270464051, - "acc_norm_stderr": 0.004987728900897601 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917055, - "acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917055 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.017268607560005776, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.017268607560005776 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628817, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628817 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.03571609230053481, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.03571609230053481 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3987138263665595, - "acc_stderr": 0.0278093225857745, - "acc_norm": 0.3987138263665595, - "acc_norm_stderr": 0.0278093225857745 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.03259625118416828, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.03259625118416828 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.043749285605997376, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.043749285605997376 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3838383838383838, - "acc_stderr": 0.03464881675016339, - "acc_norm": 0.3838383838383838, - "acc_norm_stderr": 0.03464881675016339 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4, - "acc_stderr": 0.04082482904638628, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04082482904638628 - }, - 
"harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03156663099215416, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03156663099215416 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28717948717948716, - "acc_stderr": 0.022939925418530613, - "acc_norm": 0.28717948717948716, - "acc_norm_stderr": 0.022939925418530613 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733555, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733555 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.38387096774193546, - "acc_stderr": 0.027666182075539652, - "acc_norm": 0.38387096774193546, - "acc_norm_stderr": 0.027666182075539652 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49572649572649574, - "acc_stderr": 0.032754892643821316, - "acc_norm": 0.49572649572649574, - "acc_norm_stderr": 0.032754892643821316 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3320754716981132, - "acc_stderr": 0.02898545565233439, - "acc_norm": 0.3320754716981132, - "acc_norm_stderr": 0.02898545565233439 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.04673752333670237, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.04673752333670237 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.02549753263960955, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.02549753263960955 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4527363184079602, - "acc_stderr": 0.035197027175769155, - "acc_norm": 0.4527363184079602, - "acc_norm_stderr": 0.035197027175769155 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.03476599607516478, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.03476599607516478 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24338624338624337, - "acc_stderr": 0.022101128787415412, - "acc_norm": 0.24338624338624337, - "acc_norm_stderr": 0.022101128787415412 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.039621355734862175, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.039621355734862175 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237103, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237103 - }, - 
"harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.025992472029306386, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.025992472029306386 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3374233128834356, - "acc_stderr": 0.03714908409935575, - "acc_norm": 0.3374233128834356, - "acc_norm_stderr": 0.03714908409935575 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38580246913580246, - "acc_stderr": 0.027085401226132143, - "acc_norm": 0.38580246913580246, - "acc_norm_stderr": 0.027085401226132143 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38860103626943004, - "acc_stderr": 0.03517739796373132, - "acc_norm": 0.38860103626943004, - "acc_norm_stderr": 0.03517739796373132 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4018348623853211, - "acc_stderr": 0.021020106172997013, - "acc_norm": 0.4018348623853211, - "acc_norm_stderr": 0.021020106172997013 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.039325376803928704, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.039325376803928704 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.028358956313423556, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.028358956313423556 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.01887568293806944, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.01887568293806944 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.26785714285714285, - "acc_stderr": 0.04203277291467762, - "acc_norm": 0.26785714285714285, - "acc_norm_stderr": 0.04203277291467762 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.03191923445686186, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.03191923445686186 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - 
"harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41911764705882354, - "acc_stderr": 0.02997280717046463, - "acc_norm": 0.41911764705882354, - "acc_norm_stderr": 0.02997280717046463 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.33877551020408164, - "acc_stderr": 0.030299506562154185, - "acc_norm": 0.33877551020408164, - "acc_norm_stderr": 0.030299506562154185 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4810126582278481, - "acc_stderr": 0.03252375148090448, - "acc_norm": 0.4810126582278481, - "acc_norm_stderr": 0.03252375148090448 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3135593220338983, - "acc_stderr": 0.011849234291459324, - "acc_norm": 0.3135593220338983, - "acc_norm_stderr": 0.011849234291459324 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.03426712349247272, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.03426712349247272 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3696969696969697, - "acc_stderr": 0.03769430314512568, - "acc_norm": 0.3696969696969697, - "acc_norm_stderr": 0.03769430314512568 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752332, - "mc2": 0.3857319099407924, - "mc2_stderr": 0.015181937276962347 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2992957746478873, - "acc_stderr": 0.015698309276204952, - "acc_norm": 0.3779342723004695, - "acc_norm_stderr": 0.01662116634084928 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Jaewoo1/Platypus7B_Follow_LoRA", - "model_sha": "b963d09e5db0e791858e56e3fafac7e066328014", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json b/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json deleted file mode 100644 index d4b1af5575a6d51b84b34060e556c8e3cb44c798..0000000000000000000000000000000000000000 --- a/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30802047781569963, - "acc_stderr": 0.01349142951729204, - "acc_norm": 0.3515358361774744, - "acc_norm_stderr": 0.013952413699600938 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39533957379008167, - "acc_stderr": 0.004879242848473461, - "acc_norm": 0.5114519020115514, - "acc_norm_stderr": 0.0049884724594180165 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3216374269005848, - "acc_stderr": 0.03582529442573122, - "acc_norm": 0.3216374269005848, - "acc_norm_stderr": 0.03582529442573122 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.20388349514563106, - "acc_stderr": 0.0398913985953177, - "acc_norm": 0.20388349514563106, - "acc_norm_stderr": 0.0398913985953177 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2567049808429119, - "acc_stderr": 0.015620480263064533, - "acc_norm": 0.2567049808429119, - "acc_norm_stderr": 0.015620480263064533 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.03633384414073465, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.03633384414073465 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.02895734278834235, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.02895734278834235 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.034605799075530276, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.034605799075530276 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24437299035369775, - "acc_stderr": 0.0244061620946689, - "acc_norm": 0.24437299035369775, - "acc_norm_stderr": 0.0244061620946689 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.22869955156950672, - "acc_stderr": 0.028188240046929196, - "acc_norm": 0.22869955156950672, - "acc_norm_stderr": 0.028188240046929196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 
0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.18686868686868688, - "acc_stderr": 0.02777253333421899, - "acc_norm": 0.18686868686868688, - "acc_norm_stderr": 0.02777253333421899 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2620689655172414, - "acc_stderr": 0.03664666337225256, - "acc_norm": 0.2620689655172414, - "acc_norm_stderr": 0.03664666337225256 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.14705882352941177, - "acc_stderr": 0.035240689515674495, - "acc_norm": 0.14705882352941177, - "acc_norm_stderr": 0.035240689515674495 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.026265024608275882, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.026265024608275882 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2076923076923077, - "acc_stderr": 0.020567539567246787, - "acc_norm": 0.2076923076923077, - "acc_norm_stderr": 0.020567539567246787 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.1724137931034483, - "acc_stderr": 0.026577672183036572, - "acc_norm": 0.1724137931034483, - "acc_norm_stderr": 0.026577672183036572 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.20967741935483872, - "acc_stderr": 0.02315787934908353, - "acc_norm": 0.20967741935483872, - "acc_norm_stderr": 0.02315787934908353 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2863247863247863, - "acc_stderr": 0.02961432369045665, - "acc_norm": 0.2863247863247863, - "acc_norm_stderr": 0.02961432369045665 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21132075471698114, - "acc_stderr": 0.025125766484827845, - "acc_norm": 0.21132075471698114, - "acc_norm_stderr": 0.025125766484827845 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072776, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072776 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.025644108639267645, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.025644108639267645 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2251655629139073, - "acc_stderr": 0.03410435282008936, - "acc_norm": 0.2251655629139073, - "acc_norm_stderr": 0.03410435282008936 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23383084577114427, - "acc_stderr": 0.029929415408348377, - "acc_norm": 0.23383084577114427, - "acc_norm_stderr": 0.029929415408348377 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.21693121693121692, - "acc_stderr": 0.02122708244944504, - "acc_norm": 0.21693121693121692, - 
"acc_norm_stderr": 0.02122708244944504 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.02378620325550829, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.02378620325550829 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.033220157957767414, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.033220157957767414 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25308641975308643, - "acc_stderr": 0.024191808600712992, - "acc_norm": 0.25308641975308643, - "acc_norm_stderr": 0.024191808600712992 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.21243523316062177, - "acc_stderr": 0.029519282616817247, - "acc_norm": 0.21243523316062177, - "acc_norm_stderr": 0.029519282616817247 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813344, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813344 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21100917431192662, - "acc_stderr": 0.017493922404112648, - "acc_norm": 0.21100917431192662, - "acc_norm_stderr": 0.017493922404112648 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.037184890068181146, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.037184890068181146 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.024954184324879905, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.024954184324879905 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.03941897526516304, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.03941897526516304 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19736842105263158, - "acc_stderr": 0.03238981601699397, - "acc_norm": 0.19736842105263158, - "acc_norm_stderr": 0.03238981601699397 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27124183006535946, - "acc_stderr": 0.017986615304030312, - "acc_norm": 0.27124183006535946, - "acc_norm_stderr": 0.017986615304030312 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432407, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432407 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.38392857142857145, - "acc_stderr": 0.04616143075028547, - "acc_norm": 0.38392857142857145, - "acc_norm_stderr": 0.04616143075028547 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.025416428388767474, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.025416428388767474 - }, - 
"harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961459, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961459 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.024398192986654924, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.024398192986654924 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22040816326530613, - "acc_stderr": 0.0265370453121453, - "acc_norm": 0.22040816326530613, - "acc_norm_stderr": 0.0265370453121453 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2489451476793249, - "acc_stderr": 0.028146970599422644, - "acc_norm": 0.2489451476793249, - "acc_norm_stderr": 0.028146970599422644 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24641460234680573, - "acc_stderr": 0.011005971399927234, - "acc_norm": 0.24641460234680573, - "acc_norm_stderr": 0.011005971399927234 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.029331162294251735, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.029331162294251735 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.19393939393939394, - "acc_stderr": 0.030874145136562097, - "acc_norm": 0.19393939393939394, - "acc_norm_stderr": 0.030874145136562097 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2484700122399021, - "mc1_stderr": 0.0151274270965207, - "mc2": 0.40538205465914606, - "mc2_stderr": 0.01537488137847706 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5551643192488263, - "acc_stderr": 0.01703514366596627, - "acc_norm": 0.613849765258216, - "acc_norm_stderr": 0.016689541992754253 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - 
"harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "KRAFTON/KORani-v1-13B", - "model_sha": "a699d0cebc4815f33854bc83065a03fc9008473c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json b/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json deleted file mode 100644 index bb896bc9f628eab6a91744412792774c497cfc64..0000000000000000000000000000000000000000 --- a/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2960750853242321, - "acc_stderr": 0.013340916085246263, - "acc_norm": 0.3370307167235495, - "acc_norm_stderr": 0.013813476652902265 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35241983668591914, - "acc_stderr": 0.004767475366689779, - "acc_norm": 0.42252539334793865, - "acc_norm_stderr": 0.004929517011508216 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4093567251461988, - "acc_stderr": 0.037712831076265434, - "acc_norm": 0.4093567251461988, - "acc_norm_stderr": 0.037712831076265434 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.37547892720306514, - "acc_stderr": 0.01731661319718279, - "acc_norm": 0.37547892720306514, - "acc_norm_stderr": 0.01731661319718279 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.031068985963122145, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.031068985963122145 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.0362933532994786, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.0362933532994786 - }, - "harness|ko_mmlu_philosophy|5": { - 
"acc": 0.4115755627009646, - "acc_stderr": 0.027950481494401266, - "acc_norm": 0.4115755627009646, - "acc_norm_stderr": 0.027950481494401266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.03252113489929188, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.03252113489929188 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3893129770992366, - "acc_stderr": 0.04276486542814591, - "acc_norm": 0.3893129770992366, - "acc_norm_stderr": 0.04276486542814591 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03358618145732524, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03358618145732524 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.039966295748767186, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.039966295748767186 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.03077805742293167, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.03077805742293167 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.33589743589743587, - "acc_stderr": 0.02394672474156397, - "acc_norm": 0.33589743589743587, - "acc_norm_stderr": 0.02394672474156397 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907062, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907062 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970187, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970187 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3258064516129032, - "acc_stderr": 0.026662010578567107, - "acc_norm": 0.3258064516129032, - "acc_norm_stderr": 0.026662010578567107 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5683760683760684, - "acc_stderr": 0.0324483553531149, - "acc_norm": 0.5683760683760684, - "acc_norm_stderr": 0.0324483553531149 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.37358490566037733, - "acc_stderr": 0.029773082713319878, - "acc_norm": 0.37358490566037733, - "acc_norm_stderr": 0.029773082713319878 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.04769300568972743, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.04769300568972743 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 
0.42786069651741293, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.42786069651741293, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.035676037996391685, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.035676037996391685 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.02141168439369418, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.02141168439369418 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.44, - "acc_stderr": 0.0498887651569859, - "acc_norm": 0.44, - "acc_norm_stderr": 0.0498887651569859 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.40173410404624277, - "acc_stderr": 0.026394104177643634, - "acc_norm": 0.40173410404624277, - "acc_norm_stderr": 0.026394104177643634 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.03642914578292404, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.03642914578292404 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.026406145973625658, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.026406145973625658 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40932642487046633, - "acc_stderr": 0.03548608168860806, - "acc_norm": 0.40932642487046633, - "acc_norm_stderr": 0.03548608168860806 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748142, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748142 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30642201834862387, - "acc_stderr": 0.019765517220458523, - "acc_norm": 0.30642201834862387, - "acc_norm_stderr": 0.019765517220458523 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.028275490156791438, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.028275490156791438 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5041322314049587, - "acc_stderr": 0.045641987674327526, - "acc_norm": 0.5041322314049587, - "acc_norm_stderr": 0.045641987674327526 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.034597776068105365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.034597776068105365 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.019023726160724553, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.019023726160724553 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 
0.026684564340460994, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460994 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3482142857142857, - "acc_stderr": 0.045218299028335865, - "acc_norm": 0.3482142857142857, - "acc_norm_stderr": 0.045218299028335865 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.029157522184605617, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.029157522184605617 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961443, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961443 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2610294117647059, - "acc_stderr": 0.02667925227010312, - "acc_norm": 0.2610294117647059, - "acc_norm_stderr": 0.02667925227010312 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827424, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827424 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.39662447257383965, - "acc_stderr": 0.03184399873811225, - "acc_norm": 0.39662447257383965, - "acc_norm_stderr": 0.03184399873811225 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27640156453715775, - "acc_stderr": 0.011422153194553567, - "acc_norm": 0.27640156453715775, - "acc_norm_stderr": 0.011422153194553567 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.033321399446680854, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.033321399446680854 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3515151515151515, - "acc_stderr": 0.037282069986826503, - "acc_norm": 0.3515151515151515, - "acc_norm_stderr": 0.037282069986826503 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.015528566637087305, - "mc2": 0.44326975161880294, - "mc2_stderr": 0.015781962014868475 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1936619718309859, - "acc_stderr": 0.013546152666107363, - "acc_norm": 0.27816901408450706, - "acc_norm_stderr": 0.01536057085913159 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - 
"harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "KRAFTON/KORani-v2-13B", - "model_sha": "12dbb4046d3fabb3b64c3eab2ecc91faec1af9e9", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json b/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json deleted file mode 100644 index a63b919a6fcfd514b5e7ffdc06d6be5c1ee0f708..0000000000000000000000000000000000000000 --- a/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3046075085324232, - "acc_stderr": 0.01344952210993249, - "acc_norm": 0.34726962457337884, - "acc_norm_stderr": 0.013913034529620442 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3494323839872535, - "acc_stderr": 0.004758162967997396, - "acc_norm": 0.4313881696873133, - "acc_norm_stderr": 0.004942578520987348 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38011695906432746, - "acc_stderr": 0.03722965741385539, - "acc_norm": 0.38011695906432746, - "acc_norm_stderr": 0.03722965741385539 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.39805825242718446, - "acc_stderr": 0.04846748253977239, - "acc_norm": 0.39805825242718446, - "acc_norm_stderr": 0.04846748253977239 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3665389527458493, - "acc_stderr": 0.01723124462679705, - "acc_norm": 0.3665389527458493, - "acc_norm_stderr": 0.01723124462679705 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.03944624162501117, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.03944624162501117 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - 
"acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610334, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610334 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3253012048192771, - "acc_stderr": 0.03647168523683227, - "acc_norm": 0.3253012048192771, - "acc_norm_stderr": 0.03647168523683227 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.37942122186495175, - "acc_stderr": 0.027559949802347817, - "acc_norm": 0.37942122186495175, - "acc_norm_stderr": 0.027559949802347817 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.03259625118416827, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.03259625118416827 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.0426073515764456, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.0426073515764456 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3282828282828283, - "acc_stderr": 0.03345678422756777, - "acc_norm": 0.3282828282828283, - "acc_norm_stderr": 0.03345678422756777 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.32413793103448274, - "acc_stderr": 0.03900432069185553, - "acc_norm": 0.32413793103448274, - "acc_norm_stderr": 0.03900432069185553 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3865546218487395, - "acc_stderr": 0.03163145807552378, - "acc_norm": 0.3865546218487395, - "acc_norm_stderr": 0.03163145807552378 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.34102564102564104, - "acc_stderr": 0.024035489676335044, - "acc_norm": 0.34102564102564104, - "acc_norm_stderr": 0.024035489676335044 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3870967741935484, - "acc_stderr": 0.02770935967503249, - "acc_norm": 0.3870967741935484, - "acc_norm_stderr": 0.02770935967503249 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5811965811965812, - "acc_stderr": 0.03232128912157792, - "acc_norm": 0.5811965811965812, - "acc_norm_stderr": 0.03232128912157792 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33584905660377357, - "acc_stderr": 0.029067220146644826, - "acc_norm": 0.33584905660377357, - "acc_norm_stderr": 0.029067220146644826 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.42727272727272725, - "acc_stderr": 0.04738198703545483, - "acc_norm": 0.42727272727272725, - 
"acc_norm_stderr": 0.04738198703545483 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.02592887613276611, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.02592887613276611 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43781094527363185, - "acc_stderr": 0.0350808011219984, - "acc_norm": 0.43781094527363185, - "acc_norm_stderr": 0.0350808011219984 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.0339175032232166, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.0339175032232166 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113935, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113935 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.55, - "acc_stderr": 0.05, - "acc_norm": 0.55, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.026226158605124655, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.026226158605124655 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3987730061349693, - "acc_stderr": 0.03847021420456023, - "acc_norm": 0.3987730061349693, - "acc_norm_stderr": 0.03847021420456023 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38271604938271603, - "acc_stderr": 0.027044538138402616, - "acc_norm": 0.38271604938271603, - "acc_norm_stderr": 0.027044538138402616 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41968911917098445, - "acc_stderr": 0.035615873276858834, - "acc_norm": 0.41968911917098445, - "acc_norm_stderr": 0.035615873276858834 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3174311926605505, - "acc_stderr": 0.0199571521984605, - "acc_norm": 0.3174311926605505, - "acc_norm_stderr": 0.0199571521984605 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4084967320261438, - "acc_stderr": 0.028146405993096358, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.028146405993096358 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319773, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319773 - }, - "harness|ko_mmlu_astronomy|5": { - 
"acc": 0.23026315789473684, - "acc_stderr": 0.03426059424403165, - "acc_norm": 0.23026315789473684, - "acc_norm_stderr": 0.03426059424403165 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.01877168389352817, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.01877168389352817 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3049645390070922, - "acc_stderr": 0.02746470844202213, - "acc_norm": 0.3049645390070922, - "acc_norm_stderr": 0.02746470844202213 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.36607142857142855, - "acc_stderr": 0.04572372358737431, - "acc_norm": 0.36607142857142855, - "acc_norm_stderr": 0.04572372358737431 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.029157522184605607, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.029157522184605607 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24916201117318434, - "acc_stderr": 0.014465893829859933, - "acc_norm": 0.24916201117318434, - "acc_norm_stderr": 0.014465893829859933 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.21691176470588236, - "acc_stderr": 0.025035845227711254, - "acc_norm": 0.21691176470588236, - "acc_norm_stderr": 0.025035845227711254 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4163265306122449, - "acc_stderr": 0.03155782816556164, - "acc_norm": 0.4163265306122449, - "acc_norm_stderr": 0.03155782816556164 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3924050632911392, - "acc_stderr": 0.03178471874564729, - "acc_norm": 0.3924050632911392, - "acc_norm_stderr": 0.03178471874564729 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3050847457627119, - "acc_stderr": 0.011759939618085451, - "acc_norm": 0.3050847457627119, - "acc_norm_stderr": 0.011759939618085451 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.03374499356319355, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.03374499356319355 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4, - "acc_stderr": 0.03825460278380026, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03825460278380026 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26805385556915545, - "mc1_stderr": 0.015506204722834562, - "mc2": 0.44032476462099357, - "mc2_stderr": 0.015871156864559203 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.19014084507042253, - "acc_stderr": 0.01345171563310588, - "acc_norm": 0.2664319248826291, - "acc_norm_stderr": 0.01515474253336583 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - 
"harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "KRAFTON/KORani-v3-13B", - "model_sha": "d6479f9de126caf02a770e5e8db4524a0ccb4db7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Korabbit/my_model/result_2023-10-24 09:04:19.json b/Korabbit/my_model/result_2023-10-24 09:04:19.json deleted file mode 100644 index a0a4ec970c9543755b97a1ca87d42d8f9a1d96e9..0000000000000000000000000000000000000000 --- a/Korabbit/my_model/result_2023-10-24 09:04:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32081911262798635, - "acc_stderr": 0.013640943091946524, - "acc_norm": 0.37372013651877134, - "acc_norm_stderr": 0.014137708601759095 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38767177853017326, - "acc_stderr": 0.004862232790041553, - "acc_norm": 0.5120493925512846, - "acc_norm_stderr": 0.004988332289642081 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917055, - "acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917055 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161549, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161549 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 
0.42528735632183906, - "acc_stderr": 0.017679225489431447, - "acc_norm": 0.42528735632183906, - "acc_norm_stderr": 0.017679225489431447 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3925925925925926, - "acc_stderr": 0.042185062153688786, - "acc_norm": 0.3925925925925926, - "acc_norm_stderr": 0.042185062153688786 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2425531914893617, - "acc_stderr": 0.02802022627120022, - "acc_norm": 0.2425531914893617, - "acc_norm_stderr": 0.02802022627120022 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.0362933532994786, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.0362933532994786 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.37942122186495175, - "acc_stderr": 0.02755994980234781, - "acc_norm": 0.37942122186495175, - "acc_norm_stderr": 0.02755994980234781 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.032596251184168264, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.032596251184168264 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.042258754519696386, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.042258754519696386 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.03383201223244443, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.03383201223244443 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.039417076320648906, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.039417076320648906 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.037932811853078084, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.037932811853078084 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26153846153846155, - "acc_stderr": 0.022282141204204426, - "acc_norm": 0.26153846153846155, - "acc_norm_stderr": 0.022282141204204426 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04750077341199985, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04750077341199985 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.030712730070982592, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.030712730070982592 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.32903225806451614, - "acc_stderr": 0.026729499068349972, - "acc_norm": 0.32903225806451614, - "acc_norm_stderr": 0.026729499068349972 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5341880341880342, - "acc_stderr": 0.03267942734081228, - 
"acc_norm": 0.5341880341880342, - "acc_norm_stderr": 0.03267942734081228 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.37735849056603776, - "acc_stderr": 0.02983280811479601, - "acc_norm": 0.37735849056603776, - "acc_norm_stderr": 0.02983280811479601 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3, - "acc_stderr": 0.04389311454644286, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04389311454644286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.417910447761194, - "acc_stderr": 0.034875586404620636, - "acc_norm": 0.417910447761194, - "acc_norm_stderr": 0.034875586404620636 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.28901734104046245, - "acc_stderr": 0.034564257450869995, - "acc_norm": 0.28901734104046245, - "acc_norm_stderr": 0.034564257450869995 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536934, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536934 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.02590663263101613, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.02590663263101613 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.38650306748466257, - "acc_stderr": 0.038258255488486076, - "acc_norm": 0.38650306748466257, - "acc_norm_stderr": 0.038258255488486076 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.02622964917882116, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.02622964917882116 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089116, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089116 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.03712454853721368, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.03712454853721368 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3376146788990826, - "acc_stderr": 0.020275265986638914, - "acc_norm": 0.3376146788990826, - "acc_norm_stderr": 0.020275265986638914 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276862, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276862 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.0275300784471103, - "acc_norm": 0.3627450980392157, - 
"acc_norm_stderr": 0.0275300784471103 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5206611570247934, - "acc_stderr": 0.045604560863872344, - "acc_norm": 0.5206611570247934, - "acc_norm_stderr": 0.045604560863872344 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351586, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351586 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.019393058402355445, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.019393058402355445 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101362, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101362 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.029711275860005344, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.029711275860005344 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.264804469273743, - "acc_stderr": 0.014756906483260664, - "acc_norm": 0.264804469273743, - "acc_norm_stderr": 0.014756906483260664 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488584 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2867647058823529, - "acc_stderr": 0.02747227447323382, - "acc_norm": 0.2867647058823529, - "acc_norm_stderr": 0.02747227447323382 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3551020408163265, - "acc_stderr": 0.030635655150387638, - "acc_norm": 0.3551020408163265, - "acc_norm_stderr": 0.030635655150387638 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5147679324894515, - "acc_stderr": 0.032533028078777386, - "acc_norm": 0.5147679324894515, - "acc_norm_stderr": 0.032533028078777386 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29139504563233376, - "acc_stderr": 0.011605720214257605, - "acc_norm": 0.29139504563233376, - "acc_norm_stderr": 0.011605720214257605 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.033321399446680854, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.033321399446680854 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.03872592983524754, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.03872592983524754 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715008, - "mc2": 0.396242471455397, - "mc2_stderr": 0.01500796953934626 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5551643192488263, - "acc_stderr": 0.01703514366596628, - "acc_norm": 0.6490610328638498, - "acc_norm_stderr": 0.016360395003030395 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - 
"harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Korabbit/my_model", - "model_sha": "4e31f162c656d46d38fb785707b02628c5ef5965", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json b/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json deleted file mode 100644 index 44597e0c2241956a3d48f6387c406a937affc8ef..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2721843003412969, - "acc_stderr": 0.013006600406423709, - "acc_norm": 0.32849829351535836, - "acc_norm_stderr": 0.013724978465537377 - }, - "harness|ko_hellaswag|10": { - "acc": 0.345947022505477, - "acc_stderr": 
0.00474703876817253, - "acc_norm": 0.42362079267078273, - "acc_norm_stderr": 0.004931219148182244 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4269005847953216, - "acc_stderr": 0.03793620616529916, - "acc_norm": 0.4269005847953216, - "acc_norm_stderr": 0.03793620616529916 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.047504583990416925, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.047504583990416925 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4163473818646232, - "acc_stderr": 0.017627948030430298, - "acc_norm": 0.4163473818646232, - "acc_norm_stderr": 0.017627948030430298 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.03885004245800254, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.03885004245800254 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.31063829787234043, - "acc_stderr": 0.03025123757921317, - "acc_norm": 0.31063829787234043, - "acc_norm_stderr": 0.03025123757921317 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3253012048192771, - "acc_stderr": 0.03647168523683227, - "acc_norm": 0.3253012048192771, - "acc_norm_stderr": 0.03647168523683227 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3633440514469453, - "acc_stderr": 0.027316847674192717, - "acc_norm": 0.3633440514469453, - "acc_norm_stderr": 0.027316847674192717 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.032100621541349864, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.032100621541349864 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3282442748091603, - "acc_stderr": 0.041184385658062976, - "acc_norm": 0.3282442748091603, - "acc_norm_stderr": 0.041184385658062976 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.033586181457325226, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.033586181457325226 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.32413793103448274, - "acc_stderr": 0.03900432069185553, - "acc_norm": 0.32413793103448274, - "acc_norm_stderr": 0.03900432069185553 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149352, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149352 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.030778057422931673, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.030778057422931673 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.358974358974359, - "acc_stderr": 0.024321738484602357, - "acc_norm": 0.358974358974359, - "acc_norm_stderr": 0.024321738484602357 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.56, - "acc_stderr": 0.0498887651569859, - "acc_norm": 0.56, - "acc_norm_stderr": 0.0498887651569859 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.0471282125742677, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 
0.0471282125742677 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4, - "acc_stderr": 0.027869320571664632, - "acc_norm": 0.4, - "acc_norm_stderr": 0.027869320571664632 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5384615384615384, - "acc_stderr": 0.03265903381186195, - "acc_norm": 0.5384615384615384, - "acc_norm_stderr": 0.03265903381186195 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3622641509433962, - "acc_stderr": 0.0295822451283843, - "acc_norm": 0.3622641509433962, - "acc_norm_stderr": 0.0295822451283843 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425464, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425464 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.02840653309060846, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.02840653309060846 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.037579499229433426, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.037579499229433426 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43781094527363185, - "acc_stderr": 0.0350808011219984, - "acc_norm": 0.43781094527363185, - "acc_norm_stderr": 0.0350808011219984 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.03391750322321659, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.03391750322321659 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.34656084656084657, - "acc_stderr": 0.024508777521028428, - "acc_norm": 0.34656084656084657, - "acc_norm_stderr": 0.024508777521028428 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.02494679222527231, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.02494679222527231 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.037311335196738925, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.037311335196738925 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.02712511551316687, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.02712511551316687 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48704663212435234, - "acc_stderr": 0.03607228061047749, - "acc_norm": 0.48704663212435234, - "acc_norm_stderr": 0.03607228061047749 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.040493392977481404, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.040493392977481404 - }, - 
"harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3779816513761468, - "acc_stderr": 0.020789187066728113, - "acc_norm": 0.3779816513761468, - "acc_norm_stderr": 0.020789187066728113 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871137, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871137 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.027530078447110317, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.027530078447110317 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5289256198347108, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.5289256198347108, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.0387813988879761, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.0387813988879761 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.018875682938069443, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.018875682938069443 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3262411347517731, - "acc_stderr": 0.02796845304356317, - "acc_norm": 0.3262411347517731, - "acc_norm_stderr": 0.02796845304356317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.02971127586000533, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.02971127586000533 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2569832402234637, - "acc_stderr": 0.014614465821966342, - "acc_norm": 0.2569832402234637, - "acc_norm_stderr": 0.014614465821966342 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3161764705882353, - "acc_stderr": 0.02824568739146293, - "acc_norm": 0.3161764705882353, - "acc_norm_stderr": 0.02824568739146293 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.30612244897959184, - "acc_stderr": 0.029504896454595968, - "acc_norm": 0.30612244897959184, - "acc_norm_stderr": 0.029504896454595968 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4388185654008439, - "acc_stderr": 0.032302649315470375, - "acc_norm": 0.4388185654008439, - "acc_norm_stderr": 0.032302649315470375 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31421121251629724, - "acc_stderr": 0.011855911587048231, - "acc_norm": 0.31421121251629724, - "acc_norm_stderr": 0.011855911587048231 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.03364487286088299, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.03364487286088299 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3575757575757576, - "acc_stderr": 0.037425970438065864, - "acc_norm": 0.3575757575757576, - "acc_norm_stderr": 
0.037425970438065864 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29008567931456547, - "mc1_stderr": 0.01588623687420952, - "mc2": 0.47535947414675184, - "mc2_stderr": 0.015845184891705482 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.29460093896713613, - "acc_stderr": 0.015626788056631532, - "acc_norm": 0.3908450704225352, - "acc_norm_stderr": 0.016726359269640355 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined", - "model_sha": "5f5dac05ae42c508810fe2dc7d4eef1350c3a1b2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json 
b/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json deleted file mode 100644 index 08b44bc89ee7fc8fbeb6a7140156429d2d43f983..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.295221843003413, - "acc_stderr": 0.013329750293382316, - "acc_norm": 0.3378839590443686, - "acc_norm_stderr": 0.013822047922283516 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3476399123680542, - "acc_stderr": 0.004752476997887829, - "acc_norm": 0.434973112925712, - "acc_norm_stderr": 0.004947402907996247 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.49707602339181284, - "acc_stderr": 0.03834759370936839, - "acc_norm": 0.49707602339181284, - "acc_norm_stderr": 0.03834759370936839 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.39805825242718446, - "acc_stderr": 0.0484674825397724, - "acc_norm": 0.39805825242718446, - "acc_norm_stderr": 0.0484674825397724 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4112388250319285, - "acc_stderr": 0.017595971908056573, - "acc_norm": 0.4112388250319285, - "acc_norm_stderr": 0.017595971908056573 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.03141082197596241, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.03141082197596241 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.03571609230053481, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.03571609230053481 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.44694533762057875, - "acc_stderr": 0.028237769422085335, - "acc_norm": 0.44694533762057875, - "acc_norm_stderr": 0.028237769422085335 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.40358744394618834, - "acc_stderr": 0.03292802819330313, - "acc_norm": 0.40358744394618834, - "acc_norm_stderr": 0.03292802819330313 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3282442748091603, - "acc_stderr": 0.041184385658062976, - "acc_norm": 0.3282442748091603, - "acc_norm_stderr": 0.041184385658062976 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.03318477333845331, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.03318477333845331 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.35172413793103446, - "acc_stderr": 0.03979236637497411, - "acc_norm": 0.35172413793103446, - "acc_norm_stderr": 0.03979236637497411 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.36554621848739494, - "acc_stderr": 0.03128217706368461, - "acc_norm": 0.36554621848739494, - "acc_norm_stderr": 0.03128217706368461 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - 
"acc_stderr": 0.0242831405294673, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.0242831405294673 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.53, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.53, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04643454608906275, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04643454608906275 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03344283744280457, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03344283744280457 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36451612903225805, - "acc_stderr": 0.02737987122994325, - "acc_norm": 0.36451612903225805, - "acc_norm_stderr": 0.02737987122994325 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5854700854700855, - "acc_stderr": 0.0322739656762378, - "acc_norm": 0.5854700854700855, - "acc_norm_stderr": 0.0322739656762378 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432118, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432118 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.02831753349606648, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.02831753349606648 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.46766169154228854, - "acc_stderr": 0.035281314729336065, - "acc_norm": 0.46766169154228854, - "acc_norm_stderr": 0.035281314729336065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.035331333893236574, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.035331333893236574 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.024130158299762606, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.024130158299762606 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.41329479768786126, - "acc_stderr": 0.02651126136940924, - "acc_norm": 0.41329479768786126, - "acc_norm_stderr": 0.02651126136940924 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3496932515337423, - "acc_stderr": 0.03746668325470021, - "acc_norm": 0.3496932515337423, - "acc_norm_stderr": 0.03746668325470021 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.027163686038271233, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 
0.027163686038271233 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41968911917098445, - "acc_stderr": 0.035615873276858855, - "acc_norm": 0.41968911917098445, - "acc_norm_stderr": 0.035615873276858855 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.0409698513984367, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.0409698513984367 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3871559633027523, - "acc_stderr": 0.02088423199264345, - "acc_norm": 0.3871559633027523, - "acc_norm_stderr": 0.02088423199264345 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.042857142857142816, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.042857142857142816 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027914055510468, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027914055510468 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319772, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319772 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.018771683893528183, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.018771683893528183 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509314, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509314 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.031141447823536048, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.031141447823536048 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.22793296089385476, - "acc_stderr": 0.014030149950805097, - "acc_norm": 0.22793296089385476, - "acc_norm_stderr": 0.014030149950805097 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.28308823529411764, - "acc_stderr": 0.02736586113151381, - "acc_norm": 0.28308823529411764, - "acc_norm_stderr": 0.02736586113151381 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788153, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788153 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.48945147679324896, - "acc_stderr": 0.032539983791662855, - "acc_norm": 0.48945147679324896, - "acc_norm_stderr": 
0.032539983791662855 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31877444589308995, - "acc_stderr": 0.011901895635786088, - "acc_norm": 0.31877444589308995, - "acc_norm_stderr": 0.011901895635786088 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.0327028718148208, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.0327028718148208 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.0381549430868893, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.0381549430868893 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.015616518497219374, - "mc2": 0.4600089007139919, - "mc2_stderr": 0.015856276729730875 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3145539906103286, - "acc_stderr": 0.015917301615490643, - "acc_norm": 0.3967136150234742, - "acc_norm_stderr": 0.016770095463498454 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - 
"harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined", - "model_sha": "a9340fcc369bba2e0200a3a378078fa14f4075b3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json b/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json deleted file mode 100644 index 0ffaea8af4479d5d5284df4fbdc70a7d34b4da92..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28754266211604096, - "acc_stderr": 0.013226719056266127, - "acc_norm": 0.3395904436860068, - "acc_norm_stderr": 0.01383903976282016 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36297550288787095, - "acc_stderr": 0.004798751281560832, - "acc_norm": 0.45558653654650466, - "acc_norm_stderr": 0.004970057183367319 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38011695906432746, - "acc_stderr": 0.037229657413855394, - "acc_norm": 0.38011695906432746, - "acc_norm_stderr": 0.037229657413855394 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5048543689320388, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.01757070523925654, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.01757070523925654 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.030472973363380056, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.030472973363380056 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39759036144578314, - "acc_stderr": 0.038099730845402184, - "acc_norm": 0.39759036144578314, - "acc_norm_stderr": 0.038099730845402184 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3665594855305466, - "acc_stderr": 0.02736807824397163, - "acc_norm": 0.3665594855305466, - "acc_norm_stderr": 0.02736807824397163 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2914798206278027, - "acc_stderr": 0.03050028317654591, - "acc_norm": 0.2914798206278027, - "acc_norm_stderr": 0.03050028317654591 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3893129770992366, - "acc_stderr": 0.04276486542814591, - "acc_norm": 0.3893129770992366, - "acc_norm_stderr": 0.04276486542814591 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4797979797979798, - "acc_stderr": 0.03559443565563918, - "acc_norm": 0.4797979797979798, - "acc_norm_stderr": 0.03559443565563918 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.43448275862068964, - "acc_stderr": 0.04130740879555497, - 
"acc_norm": 0.43448275862068964, - "acc_norm_stderr": 0.04130740879555497 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.032145368597886394, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.032145368597886394 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.43846153846153846, - "acc_stderr": 0.025158266016868568, - "acc_norm": 0.43846153846153846, - "acc_norm_stderr": 0.025158266016868568 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04557239513497751, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04557239513497751 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.033661244890514495, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.033661244890514495 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.35161290322580646, - "acc_stderr": 0.027162537826948458, - "acc_norm": 0.35161290322580646, - "acc_norm_stderr": 0.027162537826948458 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5512820512820513, - "acc_stderr": 0.032583346493868806, - "acc_norm": 0.5512820512820513, - "acc_norm_stderr": 0.032583346493868806 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4377358490566038, - "acc_stderr": 0.030533338430467516, - "acc_norm": 0.4377358490566038, - "acc_norm_stderr": 0.030533338430467516 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425465, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.42786069651741293, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.42786069651741293, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.0365634365335316, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.0365634365335316 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3386243386243386, - "acc_stderr": 0.024373197867983053, - "acc_norm": 0.3386243386243386, - "acc_norm_stderr": 0.024373197867983053 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - 
"acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3208092485549133, - "acc_stderr": 0.025131000233647907, - "acc_norm": 0.3208092485549133, - "acc_norm_stderr": 0.025131000233647907 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3558282208588957, - "acc_stderr": 0.03761521380046734, - "acc_norm": 0.3558282208588957, - "acc_norm_stderr": 0.03761521380046734 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3765432098765432, - "acc_stderr": 0.02695934451874779, - "acc_norm": 0.3765432098765432, - "acc_norm_stderr": 0.02695934451874779 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48186528497409326, - "acc_stderr": 0.03606065001832917, - "acc_norm": 0.48186528497409326, - "acc_norm_stderr": 0.03606065001832917 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46055045871559636, - "acc_stderr": 0.021370494609995096, - "acc_norm": 0.46055045871559636, - "acc_norm_stderr": 0.021370494609995096 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.4126984126984127, - "acc_stderr": 0.04403438954768176, - "acc_norm": 0.4126984126984127, - "acc_norm_stderr": 0.04403438954768176 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.41830065359477125, - "acc_stderr": 0.028245134024387292, - "acc_norm": 0.41830065359477125, - "acc_norm_stderr": 0.028245134024387292 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.47107438016528924, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.47107438016528924, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849727, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849727 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.26633986928104575, - "acc_stderr": 0.017883188134667192, - "acc_norm": 0.26633986928104575, - "acc_norm_stderr": 0.017883188134667192 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952685, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952685 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4583333333333333, - "acc_stderr": 0.03398110890294636, - "acc_norm": 0.4583333333333333, - "acc_norm_stderr": 0.03398110890294636 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2547486033519553, - "acc_stderr": 0.014572650383409167, - "acc_norm": 0.2547486033519553, - "acc_norm_stderr": 0.014572650383409167 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 
0.049236596391733084 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.030008562845003476, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.030008562845003476 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4163265306122449, - "acc_stderr": 0.03155782816556163, - "acc_norm": 0.4163265306122449, - "acc_norm_stderr": 0.03155782816556163 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.41350210970464135, - "acc_stderr": 0.03205649904851859, - "acc_norm": 0.41350210970464135, - "acc_norm_stderr": 0.03205649904851859 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24902216427640156, - "acc_stderr": 0.01104489226404077, - "acc_norm": 0.24902216427640156, - "acc_norm_stderr": 0.01104489226404077 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.03283472056108567, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.03283472056108567 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3575757575757576, - "acc_stderr": 0.03742597043806586, - "acc_norm": 0.3575757575757576, - "acc_norm_stderr": 0.03742597043806586 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3023255813953488, - "mc1_stderr": 0.01607750926613303, - "mc2": 0.4664193395730685, - "mc2_stderr": 0.015885964841438872 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5, - "acc_stderr": 0.017139779254776524, - "acc_norm": 0.5751173708920188, - "acc_norm_stderr": 0.0169452488268217 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k", - "model_sha": "96fceca38b3714b0ae8ec6dc120f13036eaeb69c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json b/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json deleted file mode 100644 index c777ae6f0a21f6d98b3523e41eb367063f2a5266..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28668941979522183, - "acc_stderr": 0.01321498632927476, - "acc_norm": 0.33447098976109213, - "acc_norm_stderr": 0.013787460322441374 - }, - "harness|ko_hellaswag|10": { - "acc": 0.34096793467436765, - "acc_stderr": 0.004730658073041555, - "acc_norm": 0.4268074088826927, - "acc_norm_stderr": 0.004936029827672039 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.36257309941520466, - "acc_stderr": 0.0368713061556206, - "acc_norm": 0.36257309941520466, - "acc_norm_stderr": 0.0368713061556206 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3997445721583653, - "acc_stderr": 0.01751684790705327, - "acc_norm": 0.3997445721583653, - "acc_norm_stderr": 0.01751684790705327 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.0402477840197711, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.0402477840197711 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.030472973363380045, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.030472973363380045 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3313253012048193, - "acc_stderr": 0.03664314777288085, - "acc_norm": 0.3313253012048193, - "acc_norm_stderr": 0.03664314777288085 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.027648149599751464, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.027648149599751464 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484504, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484504 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.25190839694656486, - "acc_stderr": 0.03807387116306086, 
- "acc_norm": 0.25190839694656486, - "acc_norm_stderr": 0.03807387116306086 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138622, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138622 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237653, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237653 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.023901157979402538, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.023901157979402538 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.04616631111801714, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.04616631111801714 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293753, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293753 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.32903225806451614, - "acc_stderr": 0.02672949906834996, - "acc_norm": 0.32903225806451614, - "acc_norm_stderr": 0.02672949906834996 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5042735042735043, - "acc_stderr": 0.03275489264382132, - "acc_norm": 0.5042735042735043, - "acc_norm_stderr": 0.03275489264382132 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2943396226415094, - "acc_stderr": 0.028049186315695248, - "acc_norm": 0.2943396226415094, - "acc_norm_stderr": 0.028049186315695248 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.04673752333670237, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.04673752333670237 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.027940457136228412, - "acc_norm": 0.3, - "acc_norm_stderr": 0.027940457136228412 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.25165562913907286, - "acc_stderr": 0.03543304234389985, - "acc_norm": 0.25165562913907286, - "acc_norm_stderr": 0.03543304234389985 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43283582089552236, - "acc_stderr": 0.0350349092367328, - "acc_norm": 0.43283582089552236, - "acc_norm_stderr": 0.0350349092367328 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23121387283236994, - "acc_stderr": 0.0321473730202947, - "acc_norm": 0.23121387283236994, - "acc_norm_stderr": 0.0321473730202947 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3201058201058201, - "acc_stderr": 0.024026846392873502, - "acc_norm": 0.3201058201058201, 
- "acc_norm_stderr": 0.024026846392873502 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.45, - "acc_stderr": 0.04999999999999998, - "acc_norm": 0.45, - "acc_norm_stderr": 0.04999999999999998 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3439306358381503, - "acc_stderr": 0.025574123786546648, - "acc_norm": 0.3439306358381503, - "acc_norm_stderr": 0.025574123786546648 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.03680350371286461, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.03680350371286461 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.026869490744815247, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.026869490744815247 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3626943005181347, - "acc_stderr": 0.034697137917043715, - "acc_norm": 0.3626943005181347, - "acc_norm_stderr": 0.034697137917043715 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537315, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537315 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3651376146788991, - "acc_stderr": 0.020642801454383995, - "acc_norm": 0.3651376146788991, - "acc_norm_stderr": 0.020642801454383995 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.0393253768039287, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.0393253768039287 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.026787453111906532, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.026787453111906532 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32189542483660133, - "acc_stderr": 0.018901015322093085, - "acc_norm": 0.32189542483660133, - "acc_norm_stderr": 0.018901015322093085 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2907801418439716, - "acc_stderr": 0.027090664368353178, - "acc_norm": 0.2907801418439716, - "acc_norm_stderr": 0.027090664368353178 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952688, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952688 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.03099866630456052, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.03099866630456052 - }, - 
"harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.22793296089385476, - "acc_stderr": 0.014030149950805097, - "acc_norm": 0.22793296089385476, - "acc_norm_stderr": 0.014030149950805097 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34191176470588236, - "acc_stderr": 0.02881472242225418, - "acc_norm": 0.34191176470588236, - "acc_norm_stderr": 0.02881472242225418 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3469387755102041, - "acc_stderr": 0.030472526026726492, - "acc_norm": 0.3469387755102041, - "acc_norm_stderr": 0.030472526026726492 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4219409282700422, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.4219409282700422, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29986962190352023, - "acc_stderr": 0.011702660860193989, - "acc_norm": 0.29986962190352023, - "acc_norm_stderr": 0.011702660860193989 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.03332139944668086, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.03332139944668086 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.03815494308688929, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.03815494308688929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502342, - "mc2": 0.46556936650012803, - "mc2_stderr": 0.01608055615378503 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2652582159624413, - "acc_stderr": 0.01513341742455363, - "acc_norm": 0.34976525821596244, - "acc_norm_stderr": 0.01634777454286078 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - 
"harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-eng-kor-cot-combined", - "model_sha": "ad4d7c60244d0f1e0cc11d44be9b14c3354df448", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json b/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json deleted file mode 100644 index 5b72ca8c6dc22eba397542bfcddab4495ab45e21..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2960750853242321, - "acc_stderr": 0.013340916085246263, - "acc_norm": 0.3319112627986348, - "acc_norm_stderr": 0.01376098820088054 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36367257518422624, - "acc_stderr": 0.004800728138792386, - "acc_norm": 0.4591714797849034, - "acc_norm_stderr": 0.004973117975062484 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4152046783625731, - "acc_stderr": 0.03779275945503201, - "acc_norm": 0.4152046783625731, - "acc_norm_stderr": 0.03779275945503201 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.42718446601941745, - "acc_stderr": 0.04897957737781169, - "acc_norm": 0.42718446601941745, - "acc_norm_stderr": 0.04897957737781169 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.45849297573435505, - "acc_stderr": 0.017818248603465568, - "acc_norm": 0.45849297573435505, - "acc_norm_stderr": 0.017818248603465568 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610337, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610337 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.036807836907275814, - "acc_norm": 
0.3373493975903614, - "acc_norm_stderr": 0.036807836907275814 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3890675241157556, - "acc_stderr": 0.027690337536485372, - "acc_norm": 0.3890675241157556, - "acc_norm_stderr": 0.027690337536485372 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755291, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755291 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3435114503816794, - "acc_stderr": 0.041649760719448786, - "acc_norm": 0.3435114503816794, - "acc_norm_stderr": 0.041649760719448786 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.03481285338232963, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.03481285338232963 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.36551724137931035, - "acc_stderr": 0.04013124195424386, - "acc_norm": 0.36551724137931035, - "acc_norm_stderr": 0.04013124195424386 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121633, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121633 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.0242831405294673, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.0242831405294673 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35960591133004927, - "acc_stderr": 0.03376458246509567, - "acc_norm": 0.35960591133004927, - "acc_norm_stderr": 0.03376458246509567 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3935483870967742, - "acc_stderr": 0.027791878753132274, - "acc_norm": 0.3935483870967742, - "acc_norm_stderr": 0.027791878753132274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.03107502852650775, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.03107502852650775 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3622641509433962, - "acc_stderr": 0.029582245128384303, - "acc_norm": 0.3622641509433962, - "acc_norm_stderr": 0.029582245128384303 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.028037929969114986, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.028037929969114986 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - 
"acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4129353233830846, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.4129353233830846, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.034140140070440354, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.034140140070440354 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.328042328042328, - "acc_stderr": 0.02418049716437691, - "acc_norm": 0.328042328042328, - "acc_norm_stderr": 0.02418049716437691 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3670520231213873, - "acc_stderr": 0.02595005433765408, - "acc_norm": 0.3670520231213873, - "acc_norm_stderr": 0.02595005433765408 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38580246913580246, - "acc_stderr": 0.027085401226132143, - "acc_norm": 0.38580246913580246, - "acc_norm_stderr": 0.027085401226132143 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.035415085788840193, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.035415085788840193 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.043036840335373146, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.043036840335373146 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3871559633027523, - "acc_stderr": 0.02088423199264345, - "acc_norm": 0.3871559633027523, - "acc_norm_stderr": 0.02088423199264345 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.027780141207023334, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.027780141207023334 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6115702479338843, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.6115702479338843, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351585, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351585 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.019270998708223977, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.019270998708223977 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320196, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320196 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697625, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697625 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25, - "acc_stderr": 0.029531221160930918, - "acc_norm": 0.25, - "acc_norm_stderr": 0.029531221160930918 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23910614525139665, - "acc_stderr": 0.014265554192331149, - "acc_norm": 0.23910614525139665, - "acc_norm_stderr": 0.014265554192331149 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2867647058823529, - "acc_stderr": 0.027472274473233818, - "acc_norm": 0.2867647058823529, - "acc_norm_stderr": 0.027472274473233818 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.32653061224489793, - "acc_stderr": 0.030021056238440327, - "acc_norm": 0.32653061224489793, - "acc_norm_stderr": 0.030021056238440327 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2940026075619296, - "acc_stderr": 0.011636062953698604, - "acc_norm": 0.2940026075619296, - "acc_norm_stderr": 0.011636062953698604 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3872549019607843, - "acc_stderr": 0.03418931233833344, - "acc_norm": 0.3872549019607843, - "acc_norm_stderr": 0.03418931233833344 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.03872592983524753, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.03872592983524753 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29865361077111385, - "mc1_stderr": 0.016021570613768545, - "mc2": 0.4747810026483803, - "mc2_stderr": 0.016087880887613513 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.24413145539906103, - "acc_stderr": 0.014725505977824273, - "acc_norm": 0.284037558685446, - "acc_norm_stderr": 0.015458531159043908 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - 
"harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4", - "model_sha": "13e5692b7a084265617f75f81209dce34e414489", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json b/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json deleted file mode 100644 index 63374ff8a7ea2232e1c1faa4cd4fa09fb39ee881..0000000000000000000000000000000000000000 --- a/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2977815699658703, - "acc_stderr": 0.013363080107244489, - "acc_norm": 0.33361774744027306, - "acc_norm_stderr": 0.013778687054176541 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3686516630153356, - "acc_stderr": 0.004814532642574648, - "acc_norm": 0.46086436964748057, - "acc_norm_stderr": 0.004974473255391268 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4269005847953216, - "acc_stderr": 0.03793620616529918, - "acc_norm": 0.4269005847953216, - "acc_norm_stderr": 0.03793620616529918 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.39805825242718446, - "acc_stderr": 0.0484674825397724, - "acc_norm": 0.39805825242718446, - "acc_norm_stderr": 0.0484674825397724 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41890166028097064, - "acc_stderr": 0.01764320505237717, - "acc_norm": 0.41890166028097064, - "acc_norm_stderr": 0.01764320505237717 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977109, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 
0.04024778401977109 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2723404255319149, - "acc_stderr": 0.029101290698386694, - "acc_norm": 0.2723404255319149, - "acc_norm_stderr": 0.029101290698386694 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.0374005938202932, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.0374005938202932 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.44694533762057875, - "acc_stderr": 0.028237769422085342, - "acc_norm": 0.44694533762057875, - "acc_norm_stderr": 0.028237769422085342 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4080717488789238, - "acc_stderr": 0.03298574607842821, - "acc_norm": 0.4080717488789238, - "acc_norm_stderr": 0.03298574607842821 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.33587786259541985, - "acc_stderr": 0.041423137719966634, - "acc_norm": 0.33587786259541985, - "acc_norm_stderr": 0.041423137719966634 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.034273086529999344, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.034273086529999344 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307809, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307809 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.03135709599613591, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.03135709599613591 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.41794871794871796, - "acc_stderr": 0.025007329882461224, - "acc_norm": 0.41794871794871796, - "acc_norm_stderr": 0.025007329882461224 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.59, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.59, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.047128212574267705, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.047128212574267705 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3251231527093596, - "acc_stderr": 0.032957975663112704, - "acc_norm": 0.3251231527093596, - "acc_norm_stderr": 0.032957975663112704 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.38064516129032255, - "acc_stderr": 0.027621717832907036, - "acc_norm": 0.38064516129032255, - "acc_norm_stderr": 0.027621717832907036 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6025641025641025, - "acc_stderr": 0.03205953453789293, - "acc_norm": 0.6025641025641025, - "acc_norm_stderr": 0.03205953453789293 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3622641509433962, - "acc_stderr": 0.0295822451283843, - "acc_norm": 0.3622641509433962, - "acc_norm_stderr": 0.0295822451283843 - }, - 
"harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.02784081149587194, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.02784081149587194 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5223880597014925, - "acc_stderr": 0.03531987930208732, - "acc_norm": 0.5223880597014925, - "acc_norm_stderr": 0.03531987930208732 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3468208092485549, - "acc_stderr": 0.036291466701596636, - "acc_norm": 0.3468208092485549, - "acc_norm_stderr": 0.036291466701596636 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194974, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194974 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3988439306358382, - "acc_stderr": 0.026362437574546545, - "acc_norm": 0.3988439306358382, - "acc_norm_stderr": 0.026362437574546545 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.37423312883435583, - "acc_stderr": 0.038020681028996146, - "acc_norm": 0.37423312883435583, - "acc_norm_stderr": 0.038020681028996146 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.41975308641975306, - "acc_stderr": 0.027460099557005138, - "acc_norm": 0.41975308641975306, - "acc_norm_stderr": 0.027460099557005138 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.04339138322579861, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.04339138322579861 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.43302752293577984, - "acc_stderr": 0.021244146569074345, - "acc_norm": 0.43302752293577984, - "acc_norm_stderr": 0.021244146569074345 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.043062412591271526, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.043062412591271526 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.02692565465361569, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.02692565465361569 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 
0.04519082021319773, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319773 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32679738562091504, - "acc_stderr": 0.018975427920507215, - "acc_norm": 0.32679738562091504, - "acc_norm_stderr": 0.018975427920507215 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028121636040639882, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028121636040639882 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.044328040552915206, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.044328040552915206 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.032036140846700596, - "acc_norm": 0.3287037037037037, - "acc_norm_stderr": 0.032036140846700596 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476787, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476787 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.56, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.56, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.029520095697687765, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.029520095697687765 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142804, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142804 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.48945147679324896, - "acc_stderr": 0.032539983791662855, - "acc_norm": 0.48945147679324896, - "acc_norm_stderr": 0.032539983791662855 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.33116036505867014, - "acc_stderr": 0.012020128195985759, - "acc_norm": 0.33116036505867014, - "acc_norm_stderr": 0.012020128195985759 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.034849415144292316, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.034849415144292316 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4303030303030303, - "acc_stderr": 0.03866225962879077, - "acc_norm": 0.4303030303030303, - "acc_norm_stderr": 0.03866225962879077 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2962056303549572, - "mc1_stderr": 0.01598359510181139, - "mc2": 0.4593881639963632, - "mc2_stderr": 0.01579718957910925 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5, - "acc_stderr": 0.017139779254776524, - "acc_norm": 0.5786384976525821, - "acc_norm_stderr": 0.01692646662043148 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - 
"harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihun/Mistral-7B-orca-platy-2k", - "model_sha": "45eb0f68911f65b3a5ac83a851c716add059bf5a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json b/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json deleted file mode 100644 index 4a171165268286623332fd329c745de7c6f4f7ba..0000000000000000000000000000000000000000 --- a/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35409556313993173, - "acc_stderr": 0.013975454122756557, - "acc_norm": 0.4129692832764505, - "acc_norm_stderr": 0.014388344935398322 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3813981278629755, - "acc_stderr": 0.004847372670134637, - "acc_norm": 0.48954391555467036, - "acc_norm_stderr": 0.00498869022950566 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 
0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.6116504854368932, - "acc_stderr": 0.04825729337356389, - "acc_norm": 0.6116504854368932, - "acc_norm_stderr": 0.04825729337356389 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4776500638569604, - "acc_stderr": 0.017862091778507876, - "acc_norm": 0.4776500638569604, - "acc_norm_stderr": 0.017862091778507876 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.042561937679014075, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.042561937679014075 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.37872340425531914, - "acc_stderr": 0.03170995606040655, - "acc_norm": 0.37872340425531914, - "acc_norm_stderr": 0.03170995606040655 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.45180722891566266, - "acc_stderr": 0.03874371556587953, - "acc_norm": 0.45180722891566266, - "acc_norm_stderr": 0.03874371556587953 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4662379421221865, - "acc_stderr": 0.028333277109562786, - "acc_norm": 0.4662379421221865, - "acc_norm_stderr": 0.028333277109562786 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.45739910313901344, - "acc_stderr": 0.033435777055830646, - "acc_norm": 0.45739910313901344, - "acc_norm_stderr": 0.033435777055830646 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3969465648854962, - "acc_stderr": 0.04291135671009225, - "acc_norm": 0.3969465648854962, - "acc_norm_stderr": 0.04291135671009225 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.03557806245087314, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.03557806245087314 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307809, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307809 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4789915966386555, - "acc_stderr": 0.03244980849990029, - "acc_norm": 0.4789915966386555, - "acc_norm_stderr": 0.03244980849990029 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4307692307692308, - "acc_stderr": 0.02510682066053975, - "acc_norm": 0.4307692307692308, - "acc_norm_stderr": 0.02510682066053975 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.6, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.6, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5, - "acc_stderr": 0.04833682445228318, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04833682445228318 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3645320197044335, - "acc_stderr": 0.0338640574606209, - "acc_norm": 0.3645320197044335, - "acc_norm_stderr": 0.0338640574606209 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.5032258064516129, - "acc_stderr": 
0.028443414226438316, - "acc_norm": 0.5032258064516129, - "acc_norm_stderr": 0.028443414226438316 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6709401709401709, - "acc_stderr": 0.03078232157768818, - "acc_norm": 0.6709401709401709, - "acc_norm_stderr": 0.03078232157768818 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4490566037735849, - "acc_stderr": 0.030612730713641095, - "acc_norm": 0.4490566037735849, - "acc_norm_stderr": 0.030612730713641095 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702862, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702862 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028742040903948496, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028742040903948496 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6218905472636815, - "acc_stderr": 0.03428867848778657, - "acc_norm": 0.6218905472636815, - "acc_norm_stderr": 0.03428867848778657 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.03669072477416907, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.03669072477416907 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.37566137566137564, - "acc_stderr": 0.024942368931159795, - "acc_norm": 0.37566137566137564, - "acc_norm_stderr": 0.024942368931159795 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.037738099906869334, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.037738099906869334 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237101, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237101 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.026917296179149123, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.026917296179149123 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.03889066619112722, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.03889066619112722 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.027586006221607718, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.027586006221607718 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48704663212435234, - "acc_stderr": 0.03607228061047749, - "acc_norm": 0.48704663212435234, - "acc_norm_stderr": 0.03607228061047749 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.32456140350877194, - "acc_stderr": 0.044045561573747685, - "acc_norm": 0.32456140350877194, - "acc_norm_stderr": 0.044045561573747685 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5009174311926605, - "acc_stderr": 0.021437287056051215, - "acc_norm": 0.5009174311926605, - "acc_norm_stderr": 0.021437287056051215 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 
0.04285714285714281, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04285714285714281 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4869281045751634, - "acc_stderr": 0.028620130800700246, - "acc_norm": 0.4869281045751634, - "acc_norm_stderr": 0.028620130800700246 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.03878139888797609, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.03878139888797609 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3741830065359477, - "acc_stderr": 0.01957695312208884, - "acc_norm": 0.3741830065359477, - "acc_norm_stderr": 0.01957695312208884 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.028663820147199492, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.028663820147199492 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.41964285714285715, - "acc_stderr": 0.046840993210771065, - "acc_norm": 0.41964285714285715, - "acc_norm_stderr": 0.046840993210771065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2022346368715084, - "acc_stderr": 0.01343372948332099, - "acc_norm": 0.2022346368715084, - "acc_norm_stderr": 0.01343372948332099 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.56, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.56, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3492647058823529, - "acc_stderr": 0.02895975519682487, - "acc_norm": 0.3492647058823529, - "acc_norm_stderr": 0.02895975519682487 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46122448979591835, - "acc_stderr": 0.03191282052669277, - "acc_norm": 0.46122448979591835, - "acc_norm_stderr": 0.03191282052669277 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5611814345991561, - "acc_stderr": 0.032302649315470375, - "acc_norm": 0.5611814345991561, - "acc_norm_stderr": 0.032302649315470375 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31029986962190353, - "acc_stderr": 0.011815439293469829, - "acc_norm": 0.31029986962190353, - "acc_norm_stderr": 0.011815439293469829 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.49019607843137253, - "acc_stderr": 0.03508637358630572, - "acc_norm": 0.49019607843137253, - "acc_norm_stderr": 0.03508637358630572 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03888176921674099, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03888176921674099 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3023255813953488, - "mc1_stderr": 0.016077509266133033, - "mc2": 0.4750791587895867, - "mc2_stderr": 0.015736885636484024 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.25234741784037557, - "acc_stderr": 
0.014889652814217332, - "acc_norm": 0.31220657276995306, - "acc_norm_stderr": 0.01588492803037488 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover", - "model_sha": "6a36ede83f774993cca1e5193c0c702e4b998676", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json b/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json deleted file mode 100644 index 9398e8ee882fe10e516c69bdbfa416b925a2dabc..0000000000000000000000000000000000000000 --- a/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json +++ /dev/null @@ 
-1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35921501706484643, - "acc_stderr": 0.014020224155839155, - "acc_norm": 0.41552901023890787, - "acc_norm_stderr": 0.014401366641216395 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3822943636725752, - "acc_stderr": 0.004849547819134474, - "acc_norm": 0.4878510256920932, - "acc_norm_stderr": 0.004988308234687271 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.49707602339181284, - "acc_stderr": 0.03834759370936839, - "acc_norm": 0.49707602339181284, - "acc_norm_stderr": 0.03834759370936839 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5631067961165048, - "acc_stderr": 0.04911147107365777, - "acc_norm": 0.5631067961165048, - "acc_norm_stderr": 0.04911147107365777 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.017879948914431662, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.017879948914431662 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3925925925925926, - "acc_stderr": 0.04218506215368879, - "acc_norm": 0.3925925925925926, - "acc_norm_stderr": 0.04218506215368879 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4127659574468085, - "acc_stderr": 0.03218471141400351, - "acc_norm": 0.4127659574468085, - "acc_norm_stderr": 0.03218471141400351 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.45180722891566266, - "acc_stderr": 0.03874371556587953, - "acc_norm": 0.45180722891566266, - "acc_norm_stderr": 0.03874371556587953 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.47266881028938906, - "acc_stderr": 0.02835563356832818, - "acc_norm": 0.47266881028938906, - "acc_norm_stderr": 0.02835563356832818 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4618834080717489, - "acc_stderr": 0.033460150119732274, - "acc_norm": 0.4618834080717489, - "acc_norm_stderr": 0.033460150119732274 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.37404580152671757, - "acc_stderr": 0.042438692422305246, - "acc_norm": 0.37404580152671757, - "acc_norm_stderr": 0.042438692422305246 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5404040404040404, - "acc_stderr": 0.035507024651313425, - "acc_norm": 0.5404040404040404, - "acc_norm_stderr": 0.035507024651313425 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.04104269211806232, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.04104269211806232 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.042801058373643966, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.042801058373643966 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4579831932773109, - "acc_stderr": 0.03236361111951941, - "acc_norm": 0.4579831932773109, - "acc_norm_stderr": 0.03236361111951941 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.43333333333333335, - "acc_stderr": 0.02512465352588513, - "acc_norm": 0.43333333333333335, - "acc_norm_stderr": 0.02512465352588513 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145631, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145631 - }, - 
"harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5185185185185185, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.5185185185185185, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3842364532019704, - "acc_stderr": 0.0342239856565755, - "acc_norm": 0.3842364532019704, - "acc_norm_stderr": 0.0342239856565755 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45483870967741935, - "acc_stderr": 0.028327743091561056, - "acc_norm": 0.45483870967741935, - "acc_norm_stderr": 0.028327743091561056 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7136752136752137, - "acc_stderr": 0.02961432369045665, - "acc_norm": 0.7136752136752137, - "acc_norm_stderr": 0.02961432369045665 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4830188679245283, - "acc_stderr": 0.030755120364119898, - "acc_norm": 0.4830188679245283, - "acc_norm_stderr": 0.030755120364119898 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794917, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794917 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028742040903948492, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028742040903948492 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5472636815920398, - "acc_stderr": 0.03519702717576915, - "acc_norm": 0.5472636815920398, - "acc_norm_stderr": 0.03519702717576915 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.036563436533531585, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.036563436533531585 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.02490699045899257, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.02490699045899257 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562426, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562426 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.026917296179149126, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.026917296179149126 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.45987654320987653, - "acc_stderr": 0.027731022753539274, - "acc_norm": 0.45987654320987653, - "acc_norm_stderr": 0.027731022753539274 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48186528497409326, - 
"acc_stderr": 0.036060650018329185, - "acc_norm": 0.48186528497409326, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.043727482902780085, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.043727482902780085 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48623853211009177, - "acc_stderr": 0.02142920208987408, - "acc_norm": 0.48623853211009177, - "acc_norm_stderr": 0.02142920208987408 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.0404061017820884, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.0404061017820884 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4084967320261438, - "acc_stderr": 0.028146405993096358, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.028146405993096358 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6694214876033058, - "acc_stderr": 0.04294340845212094, - "acc_norm": 0.6694214876033058, - "acc_norm_stderr": 0.04294340845212094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.04017901275981747, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.04017901275981747 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.019808281317449848, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.019808281317449848 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.02826765748265015, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.02826765748265015 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.45535714285714285, - "acc_stderr": 0.04726835553719099, - "acc_norm": 0.45535714285714285, - "acc_norm_stderr": 0.04726835553719099 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.032468872436376486, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.032468872436376486 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2011173184357542, - "acc_stderr": 0.013405946402609045, - "acc_norm": 0.2011173184357542, - "acc_norm_stderr": 0.013405946402609045 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.029029422815681404, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.029029422815681404 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46938775510204084, - "acc_stderr": 0.031949171367580624, - "acc_norm": 0.46938775510204084, - "acc_norm_stderr": 0.031949171367580624 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5274261603375527, - "acc_stderr": 0.03249822718301304, - "acc_norm": 0.5274261603375527, - "acc_norm_stderr": 0.03249822718301304 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32073011734028684, - "acc_stderr": 0.011921199991782625, - "acc_norm": 0.32073011734028684, - "acc_norm_stderr": 0.011921199991782625 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 
0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.44242424242424244, - "acc_stderr": 0.03878372113711274, - "acc_norm": 0.44242424242424244, - "acc_norm_stderr": 0.03878372113711274 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384304, - "mc2": 0.4582449322023691, - "mc2_stderr": 0.015573281761179949 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.21948356807511737, - "acc_stderr": 0.014188198345511627, - "acc_norm": 0.27582159624413144, - "acc_norm_stderr": 0.015320471749565221 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover", - "model_sha": "fcc2973dac87df41de97b6972e0323fee599bcf3", - 
"model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json b/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json deleted file mode 100644 index 8d8aef27614f918e0b30ad8591a56f155e46abf8..0000000000000000000000000000000000000000 --- a/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28668941979522183, - "acc_stderr": 0.01321498632927477, - "acc_norm": 0.3387372013651877, - "acc_norm_stderr": 0.01383056892797433 - }, - "harness|ko_hellaswag|10": { - "acc": 0.344353714399522, - "acc_stderr": 0.004741859753178415, - "acc_norm": 0.4213304122684724, - "acc_norm_stderr": 0.004927631806477553 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03565079670708311, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03565079670708311 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4174757281553398, - "acc_stderr": 0.048828405482122375, - "acc_norm": 0.4174757281553398, - "acc_norm_stderr": 0.048828405482122375 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3946360153256705, - "acc_stderr": 0.017478464305911542, - "acc_norm": 0.3946360153256705, - "acc_norm_stderr": 0.017478464305911542 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.039725528847851375, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.039725528847851375 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3872340425531915, - "acc_stderr": 0.03184389265339526, - "acc_norm": 0.3872340425531915, - "acc_norm_stderr": 0.03184389265339526 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.03680783690727581, - "acc_norm": 0.3373493975903614, - "acc_norm_stderr": 0.03680783690727581 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.40514469453376206, - "acc_stderr": 0.027882383791325946, - "acc_norm": 0.40514469453376206, - "acc_norm_stderr": 0.027882383791325946 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.38565022421524664, - "acc_stderr": 0.03266842214289202, - "acc_norm": 0.38565022421524664, - "acc_norm_stderr": 0.03266842214289202 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768362, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768362 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547155, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547155 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.32413793103448274, - "acc_stderr": 0.03900432069185554, - "acc_norm": 0.32413793103448274, - "acc_norm_stderr": 0.03900432069185554 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.042207736591714534, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.042207736591714534 - }, - 
"harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.030778057422931673, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.030778057422931673 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.34615384615384615, - "acc_stderr": 0.024121125416941183, - "acc_norm": 0.34615384615384615, - "acc_norm_stderr": 0.024121125416941183 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.0471282125742677, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.0471282125742677 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36774193548387096, - "acc_stderr": 0.027430866579973474, - "acc_norm": 0.36774193548387096, - "acc_norm_stderr": 0.027430866579973474 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5683760683760684, - "acc_stderr": 0.0324483553531149, - "acc_norm": 0.5683760683760684, - "acc_norm_stderr": 0.0324483553531149 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493854, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493854 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4636363636363636, - "acc_stderr": 0.047764491623961985, - "acc_norm": 0.4636363636363636, - "acc_norm_stderr": 0.047764491623961985 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.027940457136228416, - "acc_norm": 0.3, - "acc_norm_stderr": 0.027940457136228416 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2251655629139073, - "acc_stderr": 0.03410435282008937, - "acc_norm": 0.2251655629139073, - "acc_norm_stderr": 0.03410435282008937 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43283582089552236, - "acc_stderr": 0.0350349092367328, - "acc_norm": 0.43283582089552236, - "acc_norm_stderr": 0.0350349092367328 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.0236369759961018, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.0236369759961018 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.037738099906869334, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.037738099906869334 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3786127167630058, - "acc_stderr": 0.026113749361310338, - "acc_norm": 0.3786127167630058, - "acc_norm_stderr": 0.026113749361310338 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 
0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.33641975308641975, - "acc_stderr": 0.026289734945952926, - "acc_norm": 0.33641975308641975, - "acc_norm_stderr": 0.026289734945952926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.34196891191709844, - "acc_stderr": 0.034234651001042816, - "acc_norm": 0.34196891191709844, - "acc_norm_stderr": 0.034234651001042816 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3871559633027523, - "acc_stderr": 0.020884231992643453, - "acc_norm": 0.3871559633027523, - "acc_norm_stderr": 0.020884231992643453 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047182, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047182 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35947712418300654, - "acc_stderr": 0.027475969910660952, - "acc_norm": 0.35947712418300654, - "acc_norm_stderr": 0.027475969910660952 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5041322314049587, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.5041322314049587, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32516339869281047, - "acc_stderr": 0.018950886770806308, - "acc_norm": 0.32516339869281047, - "acc_norm_stderr": 0.018950886770806308 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25, - "acc_stderr": 0.029531221160930918, - "acc_norm": 0.25, - "acc_norm_stderr": 0.029531221160930918 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2659217877094972, - "acc_stderr": 0.014776765066438888, - "acc_norm": 0.2659217877094972, - "acc_norm_stderr": 0.014776765066438888 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2977941176470588, - "acc_stderr": 0.027778298701545436, - "acc_norm": 0.2977941176470588, - "acc_norm_stderr": 0.027778298701545436 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3510204081632653, - "acc_stderr": 0.03055531675557364, - "acc_norm": 0.3510204081632653, - "acc_norm_stderr": 0.03055531675557364 - 
}, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4008438818565401, - "acc_stderr": 0.031900803894732356, - "acc_norm": 0.4008438818565401, - "acc_norm_stderr": 0.031900803894732356 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32529335071707954, - "acc_stderr": 0.01196531153657153, - "acc_norm": 0.32529335071707954, - "acc_norm_stderr": 0.01196531153657153 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.033644872860882996, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.033644872860882996 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2802937576499388, - "mc1_stderr": 0.01572313952460875, - "mc2": 0.44624551916312966, - "mc2_stderr": 0.015796983100879885 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18427230046948356, - "acc_stderr": 0.013290393929379349, - "acc_norm": 0.25, - "acc_norm_stderr": 0.014843484249893985 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - 
"harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCLLM/Mistral-7B-KoCot-Platypus-4096", - "model_sha": "bbb51b457200947001a0dc6e318a7d2d7e717197", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json b/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json deleted file mode 100644 index 6a4030492c7ab37517494cc4057786b668962548..0000000000000000000000000000000000000000 --- a/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3651877133105802, - "acc_stderr": 0.014070265519268802, - "acc_norm": 0.4104095563139932, - "acc_norm_stderr": 0.014374922192642662 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38309101772555265, - "acc_stderr": 0.004851466623601446, - "acc_norm": 0.4949213304122685, - "acc_norm_stderr": 0.0049895240030924425 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5029239766081871, - "acc_stderr": 0.03834759370936839, - "acc_norm": 0.5029239766081871, - "acc_norm_stderr": 0.03834759370936839 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5728155339805825, - "acc_stderr": 0.04897957737781168, - "acc_norm": 0.5728155339805825, - "acc_norm_stderr": 0.04897957737781168 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.47381864623243936, - "acc_stderr": 0.017855434554041982, - "acc_norm": 0.47381864623243936, - "acc_norm_stderr": 0.017855434554041982 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.04049122041702506, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.04049122041702506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4765957446808511, - "acc_stderr": 0.03265019475033582, - "acc_norm": 0.4765957446808511, - "acc_norm_stderr": 0.03265019475033582 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4457831325301205, - "acc_stderr": 0.03869543323472101, - "acc_norm": 0.4457831325301205, - "acc_norm_stderr": 0.03869543323472101 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4790996784565916, - "acc_stderr": 0.028373270961069414, - "acc_norm": 0.4790996784565916, - "acc_norm_stderr": 0.028373270961069414 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.484304932735426, - "acc_stderr": 0.0335412657542081, - "acc_norm": 0.484304932735426, - "acc_norm_stderr": 0.0335412657542081 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4351145038167939, - "acc_stderr": 0.04348208051644858, - "acc_norm": 0.4351145038167939, - "acc_norm_stderr": 0.04348208051644858 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5404040404040404, - "acc_stderr": 0.035507024651313425, - "acc_norm": 0.5404040404040404, - "acc_norm_stderr": 0.035507024651313425 - }, - 
"harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.47478991596638653, - "acc_stderr": 0.0324371805513741, - "acc_norm": 0.47478991596638653, - "acc_norm_stderr": 0.0324371805513741 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4794871794871795, - "acc_stderr": 0.025329663163489943, - "acc_norm": 0.4794871794871795, - "acc_norm_stderr": 0.025329663163489943 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.63, - "acc_stderr": 0.048523658709390974, - "acc_norm": 0.63, - "acc_norm_stderr": 0.048523658709390974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.03465304488406796, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.03465304488406796 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4645161290322581, - "acc_stderr": 0.028372287797962952, - "acc_norm": 0.4645161290322581, - "acc_norm_stderr": 0.028372287797962952 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6837606837606838, - "acc_stderr": 0.03046365674734026, - "acc_norm": 0.6837606837606838, - "acc_norm_stderr": 0.03046365674734026 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.45660377358490567, - "acc_stderr": 0.030656748696739428, - "acc_norm": 0.45660377358490567, - "acc_norm_stderr": 0.030656748696739428 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.028317533496066475, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.028317533496066475 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6318407960199005, - "acc_stderr": 0.03410410565495302, - "acc_norm": 0.6318407960199005, - "acc_norm_stderr": 0.03410410565495302 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.036563436533531585, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.036563436533531585 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.048783173121456344, - "acc_norm": 0.62, - "acc_norm_stderr": 0.048783173121456344 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5317919075144508, - "acc_stderr": 0.026864624366756653, - "acc_norm": 0.5317919075144508, - "acc_norm_stderr": 0.026864624366756653 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.039015918258361836, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.039015918258361836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.027586006221607718, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.027586006221607718 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.48704663212435234, - "acc_stderr": 0.03607228061047749, - "acc_norm": 0.48704663212435234, - "acc_norm_stderr": 0.03607228061047749 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46788990825688076, - "acc_stderr": 0.021393071222680814, - "acc_norm": 0.46788990825688076, - "acc_norm_stderr": 0.021393071222680814 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4477124183006536, - "acc_stderr": 0.028472938478033526, - "acc_norm": 0.4477124183006536, - "acc_norm_stderr": 0.028472938478033526 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6776859504132231, - "acc_stderr": 0.04266416363352168, - "acc_norm": 0.6776859504132231, - "acc_norm_stderr": 0.04266416363352168 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777472, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777472 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.01983517648437538, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.01983517648437538 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.37943262411347517, - "acc_stderr": 0.028947338851614105, - "acc_norm": 0.37943262411347517, - "acc_norm_stderr": 0.028947338851614105 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.04697113923010213, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.04697113923010213 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.03293377139415191, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.03293377139415191 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2100558659217877, - "acc_stderr": 0.013623755371333519, - "acc_norm": 0.2100558659217877, - "acc_norm_stderr": 0.013623755371333519 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - 
"harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.56, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.56, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.0290294228156814, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.0290294228156814 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4816326530612245, - "acc_stderr": 0.03198761546763126, - "acc_norm": 0.4816326530612245, - "acc_norm_stderr": 0.03198761546763126 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6118143459915611, - "acc_stderr": 0.03172295004332331, - "acc_norm": 0.6118143459915611, - "acc_norm_stderr": 0.03172295004332331 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31877444589308995, - "acc_stderr": 0.011901895635786088, - "acc_norm": 0.31877444589308995, - "acc_norm_stderr": 0.011901895635786088 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.03492406104163613, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163613 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.42424242424242425, - "acc_stderr": 0.038592681420702615, - "acc_norm": 0.42424242424242425, - "acc_norm_stderr": 0.038592681420702615 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2827417380660955, - "mc1_stderr": 0.015764770836777305, - "mc2": 0.4637619506541597, - "mc2_stderr": 0.015446438806039912 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1607981220657277, - "acc_stderr": 0.012592412148598336, - "acc_norm": 0.22065727699530516, - "acc_norm_stderr": 0.014215383794777145 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - 
"harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad0.3", - "model_sha": "4053a441cc7724e204d047f88c2b1646a1d6aad2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json b/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json deleted file mode 100644 index be5ddb1359aa740e244dcc6dda92d78a9b0600e9..0000000000000000000000000000000000000000 --- a/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36177474402730375, - "acc_stderr": 0.014041957945038076, - "acc_norm": 0.41723549488054607, - "acc_norm_stderr": 0.014409825518403084 - }, - "harness|ko_hellaswag|10": { - "acc": 0.386476797450707, - "acc_stderr": 0.00485946798415526, - "acc_norm": 0.4965146385182235, - "acc_norm_stderr": 0.00498966018079217 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.038331852752130254, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.038331852752130254 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5242718446601942, - "acc_stderr": 0.049449010929737795, - "acc_norm": 0.5242718446601942, - "acc_norm_stderr": 0.049449010929737795 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4661558109833972, - "acc_stderr": 0.017838956009136802, - "acc_norm": 0.4661558109833972, - "acc_norm_stderr": 0.017838956009136802 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742398, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742398 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4425531914893617, - "acc_stderr": 0.03246956919789958, - "acc_norm": 0.4425531914893617, - "acc_norm_stderr": 0.03246956919789958 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42771084337349397, - "acc_stderr": 0.03851597683718533, - "acc_norm": 0.42771084337349397, - "acc_norm_stderr": 0.03851597683718533 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.45980707395498394, - "acc_stderr": 0.028306190403305696, - "acc_norm": 0.45980707395498394, - "acc_norm_stderr": 0.028306190403305696 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.45739910313901344, - "acc_stderr": 0.033435777055830646, - "acc_norm": 0.45739910313901344, - 
"acc_norm_stderr": 0.033435777055830646 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.04374928560599738, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.04374928560599738 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4689655172413793, - "acc_stderr": 0.04158632762097828, - "acc_norm": 0.4689655172413793, - "acc_norm_stderr": 0.04158632762097828 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.04336432707993177, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.04336432707993177 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.48739495798319327, - "acc_stderr": 0.03246816765752174, - "acc_norm": 0.48739495798319327, - "acc_norm_stderr": 0.03246816765752174 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.02529460802398648, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.02529460802398648 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.048262172941398944, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.048262172941398944 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.03465304488406796, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.03465304488406796 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47096774193548385, - "acc_stderr": 0.028396016402761008, - "acc_norm": 0.47096774193548385, - "acc_norm_stderr": 0.028396016402761008 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7136752136752137, - "acc_stderr": 0.029614323690456648, - "acc_norm": 0.7136752136752137, - "acc_norm_stderr": 0.029614323690456648 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.47924528301886793, - "acc_stderr": 0.030746349975723463, - "acc_norm": 0.47924528301886793, - "acc_norm_stderr": 0.030746349975723463 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794917, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794917 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.34444444444444444, - "acc_stderr": 0.02897264888484427, - "acc_norm": 0.34444444444444444, - "acc_norm_stderr": 0.02897264888484427 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6169154228855721, - "acc_stderr": 0.034375193373382504, - "acc_norm": 0.6169154228855721, - "acc_norm_stderr": 0.034375193373382504 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.036430371689585496, - "acc_norm": 0.35260115606936415, - 
"acc_norm_stderr": 0.036430371689585496 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5086705202312138, - "acc_stderr": 0.0269150473553698, - "acc_norm": 0.5086705202312138, - "acc_norm_stderr": 0.0269150473553698 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.43558282208588955, - "acc_stderr": 0.03895632464138937, - "acc_norm": 0.43558282208588955, - "acc_norm_stderr": 0.03895632464138937 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4228395061728395, - "acc_stderr": 0.027487472980871598, - "acc_norm": 0.4228395061728395, - "acc_norm_stderr": 0.027487472980871598 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5233160621761658, - "acc_stderr": 0.036045136724422014, - "acc_norm": 0.5233160621761658, - "acc_norm_stderr": 0.036045136724422014 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47706422018348627, - "acc_stderr": 0.021414757058175506, - "acc_norm": 0.47706422018348627, - "acc_norm_stderr": 0.021414757058175506 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.4126984126984127, - "acc_stderr": 0.04403438954768177, - "acc_norm": 0.4126984126984127, - "acc_norm_stderr": 0.04403438954768177 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.028555827516528787, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.028555827516528787 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6694214876033058, - "acc_stderr": 0.04294340845212094, - "acc_norm": 0.6694214876033058, - "acc_norm_stderr": 0.04294340845212094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777472, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777472 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39052287581699346, - "acc_stderr": 0.019737008998094593, - "acc_norm": 0.39052287581699346, - "acc_norm_stderr": 0.019737008998094593 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3546099290780142, - "acc_stderr": 0.02853865002887864, - "acc_norm": 0.3546099290780142, - "acc_norm_stderr": 0.02853865002887864 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.04697113923010213, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.04697113923010213 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.033509916046960436, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.033509916046960436 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23575418994413408, - "acc_stderr": 0.014196375686290804, - "acc_norm": 0.23575418994413408, - "acc_norm_stderr": 0.014196375686290804 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.62, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.62, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3786764705882353, - "acc_stderr": 0.02946513363977613, - "acc_norm": 0.3786764705882353, - "acc_norm_stderr": 0.02946513363977613 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5020408163265306, - "acc_stderr": 0.0320089533497105, - "acc_norm": 0.5020408163265306, - "acc_norm_stderr": 0.0320089533497105 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6160337552742616, - "acc_stderr": 0.031658678064106674, - "acc_norm": 0.6160337552742616, - "acc_norm_stderr": 0.031658678064106674 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3116036505867014, - "acc_stderr": 0.011829039182849646, - "acc_norm": 0.3116036505867014, - "acc_norm_stderr": 0.011829039182849646 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45588235294117646, - "acc_stderr": 0.03495624522015474, - "acc_norm": 0.45588235294117646, - "acc_norm_stderr": 0.03495624522015474 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.03872592983524754, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.03872592983524754 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384308, - "mc2": 0.4667125764870672, - "mc2_stderr": 0.015432249803510123 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2147887323943662, - "acc_stderr": 0.014077781780936459, - "acc_norm": 0.25704225352112675, - "acc_norm_stderr": 0.014980266433015269 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - 
"harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad1.0", - "model_sha": "b03dd11e5e2e64d2c59bf37ab513947869606609", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json b/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json deleted file mode 100644 index 116290cc7c75fbff920c867d436a0832b3e5f28e..0000000000000000000000000000000000000000 --- a/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.371160409556314, - "acc_stderr": 0.014117971901142817, - "acc_norm": 0.4283276450511945, - "acc_norm_stderr": 0.014460496367599022 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37950607448715395, - "acc_stderr": 0.004842723234022034, - "acc_norm": 0.481876120294762, - "acc_norm_stderr": 0.00498650229693118 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.6019417475728155, - "acc_stderr": 0.048467482539772386, - "acc_norm": 0.6019417475728155, - "acc_norm_stderr": 0.048467482539772386 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4840357598978289, - "acc_stderr": 0.017870847506081717, - "acc_norm": 0.4840357598978289, - "acc_norm_stderr": 0.017870847506081717 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4085106382978723, - "acc_stderr": 0.03213418026701576, - "acc_norm": 0.4085106382978723, - "acc_norm_stderr": 
0.03213418026701576 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4457831325301205, - "acc_stderr": 0.038695433234721015, - "acc_norm": 0.4457831325301205, - "acc_norm_stderr": 0.038695433234721015 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.41479099678456594, - "acc_stderr": 0.027982680459759556, - "acc_norm": 0.41479099678456594, - "acc_norm_stderr": 0.027982680459759556 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4663677130044843, - "acc_stderr": 0.033481800170603065, - "acc_norm": 0.4663677130044843, - "acc_norm_stderr": 0.033481800170603065 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.04225875451969638, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.04225875451969638 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5606060606060606, - "acc_stderr": 0.035360859475294805, - "acc_norm": 0.5606060606060606, - "acc_norm_stderr": 0.035360859475294805 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.04043461861916747, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.04043461861916747 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364396, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364396 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.46218487394957986, - "acc_stderr": 0.032385469487589795, - "acc_norm": 0.46218487394957986, - "acc_norm_stderr": 0.032385469487589795 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.024915243985987837, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.024915243985987837 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.63, - "acc_stderr": 0.048523658709390974, - "acc_norm": 0.63, - "acc_norm_stderr": 0.048523658709390974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.03481904844438803, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.03481904844438803 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.02820622559150274, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.02820622559150274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.688034188034188, - "acc_stderr": 0.030351527323344944, - "acc_norm": 0.688034188034188, - "acc_norm_stderr": 0.030351527323344944 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.45660377358490567, - "acc_stderr": 0.030656748696739428, - "acc_norm": 0.45660377358490567, - "acc_norm_stderr": 0.030656748696739428 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.029045600290616255, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.029045600290616255 - 
}, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3708609271523179, - "acc_stderr": 0.03943966699183629, - "acc_norm": 0.3708609271523179, - "acc_norm_stderr": 0.03943966699183629 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.03669072477416908, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.03669072477416908 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.039621355734862175, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.039621355734862175 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562427, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562427 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.48265895953757226, - "acc_stderr": 0.026902900458666633, - "acc_norm": 0.48265895953757226, - "acc_norm_stderr": 0.026902900458666633 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4233128834355828, - "acc_stderr": 0.03881891213334383, - "acc_norm": 0.4233128834355828, - "acc_norm_stderr": 0.03881891213334383 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.0275860062216077, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.0275860062216077 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46113989637305697, - "acc_stderr": 0.035975244117345775, - "acc_norm": 0.46113989637305697, - "acc_norm_stderr": 0.035975244117345775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.45321100917431195, - "acc_stderr": 0.021343255165546034, - "acc_norm": 0.45321100917431195, - "acc_norm_stderr": 0.021343255165546034 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.41830065359477125, - "acc_stderr": 0.02824513402438729, - "acc_norm": 0.41830065359477125, - "acc_norm_stderr": 0.02824513402438729 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6694214876033058, - "acc_stderr": 0.04294340845212094, - "acc_norm": 0.6694214876033058, - "acc_norm_stderr": 0.04294340845212094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777472, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777472 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.4084967320261438, - "acc_stderr": 0.019886221037501872, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.019886221037501872 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3475177304964539, - "acc_stderr": 0.028406627809590954, - "acc_norm": 0.3475177304964539, - "acc_norm_stderr": 0.028406627809590954 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.38392857142857145, - "acc_stderr": 0.046161430750285455, - "acc_norm": 0.38392857142857145, - "acc_norm_stderr": 0.046161430750285455 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.03167468706828978, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.03167468706828978 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23575418994413408, - "acc_stderr": 0.014196375686290804, - "acc_norm": 0.23575418994413408, - "acc_norm_stderr": 0.014196375686290804 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33088235294117646, - "acc_stderr": 0.028582709753898435, - "acc_norm": 0.33088235294117646, - "acc_norm_stderr": 0.028582709753898435 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3510204081632653, - "acc_stderr": 0.03055531675557364, - "acc_norm": 0.3510204081632653, - "acc_norm_stderr": 0.03055531675557364 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5569620253164557, - "acc_stderr": 0.03233532777533484, - "acc_norm": 0.5569620253164557, - "acc_norm_stderr": 0.03233532777533484 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32073011734028684, - "acc_stderr": 0.011921199991782629, - "acc_norm": 0.32073011734028684, - "acc_norm_stderr": 0.011921199991782629 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.03492406104163614, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163614 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4484848484848485, - "acc_stderr": 0.038835659779569286, - "acc_norm": 0.4484848484848485, - "acc_norm_stderr": 0.038835659779569286 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27906976744186046, - "mc1_stderr": 0.01570210709062788, - "mc2": 0.46295306302174644, - "mc2_stderr": 0.015320970978421385 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2699530516431925, - "acc_stderr": 0.015217900336776898, - "acc_norm": 0.35563380281690143, - "acc_norm_stderr": 0.01640979333825091 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCLLM/Mistral-7B-OP-over500-grad1.0", - "model_sha": "f7789c5af9b3b166070a886207090228deccf9d6", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json b/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json deleted file mode 100644 index 5166232a03dbc8308df95f3604b194ea6f6ca14e..0000000000000000000000000000000000000000 --- a/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36177474402730375, - "acc_stderr": 0.014041957945038076, - "acc_norm": 0.41723549488054607, - "acc_norm_stderr": 0.014409825518403084 - }, - "harness|ko_hellaswag|10": { - "acc": 0.386476797450707, - "acc_stderr": 0.00485946798415526, - "acc_norm": 0.4965146385182235, - "acc_norm_stderr": 0.00498966018079217 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.038331852752130254, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.038331852752130254 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5242718446601942, - "acc_stderr": 0.049449010929737795, - "acc_norm": 0.5242718446601942, - "acc_norm_stderr": 0.049449010929737795 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4674329501915709, - "acc_stderr": 0.017841995750520857, - "acc_norm": 
0.4674329501915709, - "acc_norm_stderr": 0.017841995750520857 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742398, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742398 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4425531914893617, - "acc_stderr": 0.03246956919789958, - "acc_norm": 0.4425531914893617, - "acc_norm_stderr": 0.03246956919789958 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42771084337349397, - "acc_stderr": 0.03851597683718533, - "acc_norm": 0.42771084337349397, - "acc_norm_stderr": 0.03851597683718533 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.45980707395498394, - "acc_stderr": 0.028306190403305696, - "acc_norm": 0.45980707395498394, - "acc_norm_stderr": 0.028306190403305696 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.45739910313901344, - "acc_stderr": 0.033435777055830646, - "acc_norm": 0.45739910313901344, - "acc_norm_stderr": 0.033435777055830646 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.04374928560599738, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.04374928560599738 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4689655172413793, - "acc_stderr": 0.04158632762097828, - "acc_norm": 0.4689655172413793, - "acc_norm_stderr": 0.04158632762097828 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.04336432707993177, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.04336432707993177 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.48739495798319327, - "acc_stderr": 0.03246816765752174, - "acc_norm": 0.48739495798319327, - "acc_norm_stderr": 0.03246816765752174 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.02529460802398648, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.02529460802398648 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.048262172941398944, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.048262172941398944 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.03465304488406796, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.03465304488406796 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47096774193548385, - "acc_stderr": 0.028396016402761008, - "acc_norm": 0.47096774193548385, - "acc_norm_stderr": 0.028396016402761008 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7136752136752137, - "acc_stderr": 0.029614323690456648, - "acc_norm": 0.7136752136752137, - "acc_norm_stderr": 0.029614323690456648 - }, - 
"harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.47924528301886793, - "acc_stderr": 0.030746349975723463, - "acc_norm": 0.47924528301886793, - "acc_norm_stderr": 0.030746349975723463 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794917, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794917 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.34444444444444444, - "acc_stderr": 0.02897264888484427, - "acc_norm": 0.34444444444444444, - "acc_norm_stderr": 0.02897264888484427 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6169154228855721, - "acc_stderr": 0.034375193373382504, - "acc_norm": 0.6169154228855721, - "acc_norm_stderr": 0.034375193373382504 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.036430371689585496, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.036430371689585496 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5086705202312138, - "acc_stderr": 0.0269150473553698, - "acc_norm": 0.5086705202312138, - "acc_norm_stderr": 0.0269150473553698 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.43558282208588955, - "acc_stderr": 0.03895632464138937, - "acc_norm": 0.43558282208588955, - "acc_norm_stderr": 0.03895632464138937 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4228395061728395, - "acc_stderr": 0.027487472980871598, - "acc_norm": 0.4228395061728395, - "acc_norm_stderr": 0.027487472980871598 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5233160621761658, - "acc_stderr": 0.036045136724422014, - "acc_norm": 0.5233160621761658, - "acc_norm_stderr": 0.036045136724422014 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47706422018348627, - "acc_stderr": 0.021414757058175506, - "acc_norm": 0.47706422018348627, - "acc_norm_stderr": 0.021414757058175506 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.4126984126984127, - "acc_stderr": 0.04403438954768177, - "acc_norm": 0.4126984126984127, - "acc_norm_stderr": 0.04403438954768177 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.028555827516528787, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.028555827516528787 - }, - 
"harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6694214876033058, - "acc_stderr": 0.04294340845212094, - "acc_norm": 0.6694214876033058, - "acc_norm_stderr": 0.04294340845212094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777472, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777472 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39052287581699346, - "acc_stderr": 0.019737008998094593, - "acc_norm": 0.39052287581699346, - "acc_norm_stderr": 0.019737008998094593 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3546099290780142, - "acc_stderr": 0.02853865002887864, - "acc_norm": 0.3546099290780142, - "acc_norm_stderr": 0.02853865002887864 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.04697113923010213, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.04697113923010213 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.033509916046960436, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.033509916046960436 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23575418994413408, - "acc_stderr": 0.014196375686290804, - "acc_norm": 0.23575418994413408, - "acc_norm_stderr": 0.014196375686290804 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.62, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.62, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3786764705882353, - "acc_stderr": 0.02946513363977613, - "acc_norm": 0.3786764705882353, - "acc_norm_stderr": 0.02946513363977613 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5020408163265306, - "acc_stderr": 0.0320089533497105, - "acc_norm": 0.5020408163265306, - "acc_norm_stderr": 0.0320089533497105 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6160337552742616, - "acc_stderr": 0.031658678064106674, - "acc_norm": 0.6160337552742616, - "acc_norm_stderr": 0.031658678064106674 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3116036505867014, - "acc_stderr": 0.011829039182849646, - "acc_norm": 0.3116036505867014, - "acc_norm_stderr": 0.011829039182849646 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45588235294117646, - "acc_stderr": 0.03495624522015474, - "acc_norm": 0.45588235294117646, - "acc_norm_stderr": 0.03495624522015474 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.03872592983524754, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.03872592983524754 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384308, - "mc2": 0.4667008752277657, - "mc2_stderr": 0.015432114393165898 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2147887323943662, - "acc_stderr": 0.014077781780936459, - "acc_norm": 0.25704225352112675, - "acc_norm_stderr": 0.014980266433015269 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - 
"harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MNCLLM/Mistral-7B-orca-platy-over1k", - "model_sha": "65fda49b7459f17a98b8d1c5136001698f647919", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json b/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json deleted file mode 100644 index dbf37ae37bda27f7f25af9985d5f6fcaeebd4f74..0000000000000000000000000000000000000000 --- a/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28668941979522183, - "acc_stderr": 0.013214986329274755, - "acc_norm": 0.34982935153583616, - "acc_norm_stderr": 0.013936809212158287 - }, - "harness|ko_hellaswag|10": { - "acc": 0.383788090021908, - "acc_stderr": 0.004853134271547759, - "acc_norm": 
0.4911372236606254, - "acc_norm_stderr": 0.004988997467134492 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.29239766081871343, - "acc_stderr": 0.034886477134579236, - "acc_norm": 0.29239766081871343, - "acc_norm_stderr": 0.034886477134579236 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.01574549716904906, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.01574549716904906 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.19574468085106383, - "acc_stderr": 0.025937853139977148, - "acc_norm": 0.19574468085106383, - "acc_norm_stderr": 0.025937853139977148 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.031069390260789413, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.031069390260789413 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3086816720257235, - "acc_stderr": 0.026236965881153266, - "acc_norm": 0.3086816720257235, - "acc_norm_stderr": 0.026236965881153266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25757575757575757, - "acc_stderr": 0.031156269519646836, - "acc_norm": 0.25757575757575757, - "acc_norm_stderr": 0.031156269519646836 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.039505818611799616, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.039505818611799616 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, 
- "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678242, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678242 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239952, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239952 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241238, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.03076944496729602, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.03076944496729602 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708607, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708607 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.28034682080924855, - "acc_stderr": 0.024182427496577612, - "acc_norm": 0.28034682080924855, - "acc_norm_stderr": 0.024182427496577612 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2932098765432099, - "acc_stderr": 0.025329888171900926, - "acc_norm": 0.2932098765432099, - "acc_norm_stderr": 0.025329888171900926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - 
"harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25871559633027524, - "acc_stderr": 0.01877605231961962, - "acc_norm": 0.25871559633027524, - "acc_norm_stderr": 0.01877605231961962 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1349206349206349, - "acc_stderr": 0.030557101589417508, - "acc_norm": 0.1349206349206349, - "acc_norm_stderr": 0.030557101589417508 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.025261691219729484, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.025261691219729484 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3092105263157895, - "acc_stderr": 0.03761070869867479, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.03761070869867479 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.018152871051538812, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.018152871051538812 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.02624492034984301, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.02624492034984301 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.028139689444859676, - "acc_norm": 0.2175925925925926, - "acc_norm_stderr": 0.028139689444859676 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24916201117318434, - "acc_stderr": 0.014465893829859924, - "acc_norm": 0.24916201117318434, - "acc_norm_stderr": 0.014465893829859924 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.23161764705882354, - "acc_stderr": 0.025626533803777562, - "acc_norm": 0.23161764705882354, - "acc_norm_stderr": 0.025626533803777562 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788167, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26792698826597133, - "acc_stderr": 0.011311347690633872, - "acc_norm": 0.26792698826597133, - "acc_norm_stderr": 0.011311347690633872 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.03077855467869326, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.03077855467869326 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624336, - "acc_norm": 0.2909090909090909, - 
"acc_norm_stderr": 0.03546563019624336 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2252141982864137, - "mc1_stderr": 0.014623240768023479, - "mc2": 0.3758708542635285, - "mc2_stderr": 0.014474804257008467 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.8051643192488263, - "acc_stderr": 0.013577247256924913, - "acc_norm": 0.8485915492957746, - "acc_norm_stderr": 0.012287389605332915 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MarkrAI/kyujin-CoTy-platypus-ko-12.8b", - "model_sha": "9568cc22eea62132df4e8df27510e51fd734483c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:31:33.json 
b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:31:33.json deleted file mode 100644 index f9a79ba0da154757ac7a651c6315988865b4815d..0000000000000000000000000000000000000000 --- a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:31:33.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30887372013651876, - "acc_stderr": 0.013501770929344003, - "acc_norm": 0.3515358361774744, - "acc_norm_stderr": 0.013952413699600933 - }, - "harness|ko_hellaswag|10": { - "acc": 0.391256721768572, - "acc_stderr": 0.004870342592915049, - "acc_norm": 0.5038836885082653, - "acc_norm_stderr": 0.004989630887066195 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.28654970760233917, - "acc_stderr": 0.034678266857038245, - "acc_norm": 0.28654970760233917, - "acc_norm_stderr": 0.034678266857038245 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.015745497169049057, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.015745497169049057 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838752, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838752 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663926, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663926 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.027157150479563824, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.027157150479563824 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596916, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.03727673575596916 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21025641025641026, - "acc_stderr": 
0.020660597485026928, - "acc_norm": 0.21025641025641026, - "acc_norm_stderr": 0.020660597485026928 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678242, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678242 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22264150943396227, - "acc_stderr": 0.025604233470899098, - "acc_norm": 0.22264150943396227, - "acc_norm_stderr": 0.025604233470899098 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.03096590312357301, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.03096590312357301 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.02300008685906865, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.02300008685906865 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.02454761779480383, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.02454761779480383 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2932098765432099, - "acc_stderr": 0.025329888171900926, - "acc_norm": 0.2932098765432099, - 
"acc_norm_stderr": 0.025329888171900926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22752293577981653, - "acc_stderr": 0.0179744635787765, - "acc_norm": 0.22752293577981653, - "acc_norm_stderr": 0.0179744635787765 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276863, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276863 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.025261691219729487, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.025261691219729487 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624503, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624503 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3092105263157895, - "acc_stderr": 0.03761070869867479, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.03761070869867479 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180848, - "acc_norm": 0.25886524822695034, - "acc_norm_stderr": 0.026129572527180848 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.028139689444859676, - "acc_norm": 0.2175925925925926, - "acc_norm_stderr": 0.028139689444859676 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.022571771025494757, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.022571771025494757 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29957805907172996, - "acc_stderr": 0.0298180247497531, - "acc_norm": 0.29957805907172996, - 
"acc_norm_stderr": 0.0298180247497531 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26010430247718386, - "acc_stderr": 0.011204382887823827, - "acc_norm": 0.26010430247718386, - "acc_norm_stderr": 0.011204382887823827 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.031321798030832904, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.031321798030832904 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624337, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624337 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23011015911872704, - "mc1_stderr": 0.01473455795980776, - "mc2": 0.38739814063055383, - "mc2_stderr": 0.01474443864761987 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7089201877934272, - "acc_stderr": 0.015571840078994576, - "acc_norm": 0.7488262910798122, - "acc_norm_stderr": 0.014866640898170014 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - 
"harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MarkrAI/kyujin-Poly-platypus-ko-12.8b", - "model_sha": "036706515817a153e2249d2a135700bdcc10a36e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json deleted file mode 100644 index eb08477a11be2afaa37f2ece40276ab4c8c35a4d..0000000000000000000000000000000000000000 --- a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30887372013651876, - "acc_stderr": 0.013501770929344003, - "acc_norm": 0.3515358361774744, - "acc_norm_stderr": 0.013952413699600933 - }, - "harness|ko_hellaswag|10": { - "acc": 0.391256721768572, - "acc_stderr": 0.004870342592915049, - "acc_norm": 0.5038836885082653, - "acc_norm_stderr": 0.004989630887066195 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.28654970760233917, - "acc_stderr": 0.034678266857038245, - "acc_norm": 0.28654970760233917, - "acc_norm_stderr": 0.034678266857038245 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.015745497169049057, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.015745497169049057 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838752, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838752 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663926, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663926 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.027157150479563824, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.027157150479563824 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596916, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.03727673575596916 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - 
"acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21025641025641026, - "acc_stderr": 0.020660597485026928, - "acc_norm": 0.21025641025641026, - "acc_norm_stderr": 0.020660597485026928 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678242, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678242 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22264150943396227, - "acc_stderr": 0.025604233470899098, - "acc_norm": 0.22264150943396227, - "acc_norm_stderr": 0.025604233470899098 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.03096590312357301, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.03096590312357301 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.02300008685906865, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.02300008685906865 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 
0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.02454761779480383, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.02454761779480383 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2932098765432099, - "acc_stderr": 0.025329888171900926, - "acc_norm": 0.2932098765432099, - "acc_norm_stderr": 0.025329888171900926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22752293577981653, - "acc_stderr": 0.0179744635787765, - "acc_norm": 0.22752293577981653, - "acc_norm_stderr": 0.0179744635787765 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276863, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276863 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.025261691219729487, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.025261691219729487 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624503, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624503 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3092105263157895, - "acc_stderr": 0.03761070869867479, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.03761070869867479 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180848, - "acc_norm": 0.25886524822695034, - "acc_norm_stderr": 0.026129572527180848 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.028139689444859676, - "acc_norm": 0.2175925925925926, - "acc_norm_stderr": 0.028139689444859676 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - 
"harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.022571771025494757, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.022571771025494757 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29957805907172996, - "acc_stderr": 0.0298180247497531, - "acc_norm": 0.29957805907172996, - "acc_norm_stderr": 0.0298180247497531 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25945241199478486, - "acc_stderr": 0.011195262076350309, - "acc_norm": 0.25945241199478486, - "acc_norm_stderr": 0.011195262076350309 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.031321798030832904, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.031321798030832904 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624337, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624337 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23011015911872704, - "mc1_stderr": 0.01473455795980776, - "mc2": 0.38739814063055383, - "mc2_stderr": 0.01474443864761987 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7089201877934272, - "acc_stderr": 0.015571840078994576, - "acc_norm": 0.7488262910798122, - "acc_norm_stderr": 0.014866640898170014 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "MarkrAI/kyujin-Poly-platypus-ko-12.8b", - "model_sha": "4137c49c1c3902a187dc5368dacc13b1ee26814b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json b/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json deleted file mode 100644 index d1a0107b23316edb31de952c9b55261bfdb89e86..0000000000000000000000000000000000000000 --- a/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20733788395904437, - "acc_stderr": 0.011846905782971363, - "acc_norm": 0.2593856655290102, - "acc_norm_stderr": 0.012808273573927088 - }, - "harness|ko_hellaswag|10": { - "acc": 0.25124477195777734, - "acc_stderr": 0.00432842570099869, - "acc_norm": 0.2526389165504879, - "acc_norm_stderr": 0.004336375492801796 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824565, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824565 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.04802694698258973, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.04802694698258973 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2707535121328225, - "acc_stderr": 0.015889888362560486, - "acc_norm": 0.2707535121328225, - "acc_norm_stderr": 0.015889888362560486 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.02635515841334941, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.02635515841334941 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663926, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663926 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3054662379421222, - "acc_stderr": 0.026160584450140474, - "acc_norm": 0.3054662379421222, - "acc_norm_stderr": 0.026160584450140474 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.11659192825112108, - "acc_stderr": 0.021539639816244467, - "acc_norm": 0.11659192825112108, - "acc_norm_stderr": 0.021539639816244467 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 
0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036625, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036625 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124505, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124505 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2605042016806723, - "acc_stderr": 0.02851025151234192, - "acc_norm": 0.2605042016806723, - "acc_norm_stderr": 0.02851025151234192 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.34615384615384615, - "acc_stderr": 0.024121125416941183, - "acc_norm": 0.34615384615384615, - "acc_norm_stderr": 0.024121125416941183 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.02528839450289136, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.02528839450289136 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776575, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 
0.022789673145776575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.0222896388526179, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.0222896388526179 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.29012345679012347, - "acc_stderr": 0.02525117393649501, - "acc_norm": 0.29012345679012347, - "acc_norm_stderr": 0.02525117393649501 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.344954128440367, - "acc_stderr": 0.02038060540506697, - "acc_norm": 0.344954128440367, - "acc_norm_stderr": 0.02038060540506697 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15079365079365079, - "acc_stderr": 0.03200686497287392, - "acc_norm": 0.15079365079365079, - "acc_norm_stderr": 0.03200686497287392 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.0248480182638752, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.0248480182638752 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.1487603305785124, - "acc_stderr": 0.03248470083807195, - "acc_norm": 0.1487603305785124, - "acc_norm_stderr": 0.03248470083807195 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119668, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119668 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.017952449196987866, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.017952449196987866 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.026244920349843007, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.026244920349843007 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755808, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755808 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.027920963147993662, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.027920963147993662 - }, - 
"harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.02257177102549475, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.02257177102549475 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22040816326530613, - "acc_stderr": 0.026537045312145277, - "acc_norm": 0.22040816326530613, - "acc_norm_stderr": 0.026537045312145277 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.28270042194092826, - "acc_stderr": 0.02931281415395592, - "acc_norm": 0.28270042194092826, - "acc_norm_stderr": 0.02931281415395592 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24641460234680573, - "acc_stderr": 0.011005971399927246, - "acc_norm": 0.24641460234680573, - "acc_norm_stderr": 0.011005971399927246 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604243, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604243 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.03192271569548299, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.03192271569548299 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25091799265605874, - "mc1_stderr": 0.015176985027707684, - "mc2": 0.5034077613881154, - "mc2_stderr": 0.016935414149113398 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1255868544600939, - "acc_stderr": 0.011359668304132963, - "acc_norm": 0.4460093896713615, - "acc_norm_stderr": 0.01703956183256367 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - 
"harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Nara-Lab/nallm-bart", - "model_sha": "a3a334adbae67f890b4186b5ce5aa4c5d7fbceaf", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json b/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json deleted file mode 100644 index 69531bbdc333eb75fb52a68ed78d858543977118..0000000000000000000000000000000000000000 --- a/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.25170648464163825, - "acc_stderr": 0.01268249633404297, - "acc_norm": 0.3054607508532423, - "acc_norm_stderr": 0.013460080478002496 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3325034853614818, - "acc_stderr": 0.004701474865207019, - "acc_norm": 0.4032065325632344, - "acc_norm_stderr": 0.004895390341445625 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.22860791826309068, - "acc_stderr": 0.015016884698539873, - "acc_norm": 0.22860791826309068, - "acc_norm_stderr": 0.015016884698539873 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2170212765957447, - "acc_stderr": 0.026947483121496238, - "acc_norm": 0.2170212765957447, - "acc_norm_stderr": 0.026947483121496238 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3253012048192771, - "acc_stderr": 0.03647168523683227, - "acc_norm": 0.3253012048192771, - "acc_norm_stderr": 0.03647168523683227 
- }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2765273311897106, - "acc_stderr": 0.025403832978179622, - "acc_norm": 0.2765273311897106, - "acc_norm_stderr": 0.025403832978179622 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.032596251184168264, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.032596251184168264 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462203, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462203 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365914, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365914 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149354, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149354 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868963, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868963 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.021444547301560476, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.021444547301560476 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.24630541871921183, - "acc_stderr": 0.030315099285617722, - "acc_norm": 0.24630541871921183, - "acc_norm_stderr": 0.030315099285617722 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2870967741935484, - "acc_stderr": 0.025736542745594528, - "acc_norm": 0.2870967741935484, - "acc_norm_stderr": 0.025736542745594528 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2948717948717949, - "acc_stderr": 0.029872577708891162, - "acc_norm": 0.2948717948717949, - "acc_norm_stderr": 0.029872577708891162 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24528301886792453, - "acc_stderr": 0.02648035717989569, - "acc_norm": 0.24528301886792453, - "acc_norm_stderr": 0.02648035717989569 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072775, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072775 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.027309140588230165, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.027309140588230165 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804724, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 
0.03511807571804724 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.29850746268656714, - "acc_stderr": 0.032357437893550424, - "acc_norm": 0.29850746268656714, - "acc_norm_stderr": 0.032357437893550424 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918417, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918417 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.024659685185967277, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.024659685185967277 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24352331606217617, - "acc_stderr": 0.030975436386845436, - "acc_norm": 0.24352331606217617, - "acc_norm_stderr": 0.030975436386845436 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26055045871559634, - "acc_stderr": 0.01881918203485007, - "acc_norm": 0.26055045871559634, - "acc_norm_stderr": 0.01881918203485007 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.038932596106046734, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.038932596106046734 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.238562091503268, - "acc_stderr": 0.024404394928087873, - "acc_norm": 0.238562091503268, - "acc_norm_stderr": 0.024404394928087873 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.03896878985070417, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.03896878985070417 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.20394736842105263, - "acc_stderr": 0.03279000406310049, - "acc_norm": 0.20394736842105263, - "acc_norm_stderr": 0.03279000406310049 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.017740899509177795, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.017740899509177795 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290396, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.042878587513404544, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.042878587513404544 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03179876342176849, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03179876342176849 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2558659217877095, - "acc_stderr": 0.014593620923210756, - "acc_norm": 0.2558659217877095, - "acc_norm_stderr": 0.014593620923210756 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.13, - "acc_stderr": 0.03379976689896309, - "acc_norm": 0.13, - "acc_norm_stderr": 0.03379976689896309 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3639705882352941, - "acc_stderr": 0.02922719246003203, - "acc_norm": 0.3639705882352941, - "acc_norm_stderr": 0.02922719246003203 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.02737294220178817, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.02737294220178817 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23663624511082137, - "acc_stderr": 0.010855137351572747, - "acc_norm": 0.23663624511082137, - "acc_norm_stderr": 0.010855137351572747 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.02933116229425173, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.02933116229425173 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.03192271569548297, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.03192271569548297 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862685, - "mc2": 0.4101668259727761, - "mc2_stderr": 0.01554453474117709 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.06924882629107981, - "acc_stderr": 0.00870278440176373, - "acc_norm": 0.1068075117370892, - "acc_norm_stderr": 0.010587871205074872 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Nara-Lab/nallm-polyglot-ko-1.3b-base", - "model_sha": "8fd7fa9b1b5bbe857f65576e2e37bd600e10ce8c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json b/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json deleted file mode 100644 index 915c6f490d6c6e0db7da3c3ebb45ae4e7c268afa..0000000000000000000000000000000000000000 --- a/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.26109215017064846, - "acc_stderr": 0.012835523909473864, - "acc_norm": 0.32337883959044367, - "acc_norm_stderr": 0.013669421630012123 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3641704839673372, - "acc_stderr": 0.004802133511654235, - "acc_norm": 0.45727942640908187, - "acc_norm_stderr": 0.004971534874389935 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21637426900584794, - "acc_stderr": 0.031581495393387345, - "acc_norm": 0.21637426900584794, - "acc_norm_stderr": 0.031581495393387345 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690876, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690876 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2388250319284802, - "acc_stderr": 0.015246803197398687, - "acc_norm": 0.2388250319284802, - "acc_norm_stderr": 0.015246803197398687 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.03885004245800254, - "acc_norm": 
0.2814814814814815, - "acc_norm_stderr": 0.03885004245800254 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514196, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514196 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3313253012048193, - "acc_stderr": 0.03664314777288086, - "acc_norm": 0.3313253012048193, - "acc_norm_stderr": 0.03664314777288086 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.29260450160771706, - "acc_stderr": 0.025839898334877983, - "acc_norm": 0.29260450160771706, - "acc_norm_stderr": 0.025839898334877983 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21973094170403587, - "acc_stderr": 0.0277901770643836, - "acc_norm": 0.21973094170403587, - "acc_norm_stderr": 0.0277901770643836 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.20610687022900764, - "acc_stderr": 0.03547771004159463, - "acc_norm": 0.20610687022900764, - "acc_norm_stderr": 0.03547771004159463 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.029857515673386414, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.029857515673386414 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.22268907563025211, - "acc_stderr": 0.027025433498882367, - "acc_norm": 0.22268907563025211, - "acc_norm_stderr": 0.027025433498882367 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.24102564102564103, - "acc_stderr": 0.021685546665333184, - "acc_norm": 0.24102564102564103, - "acc_norm_stderr": 0.021685546665333184 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04330043749650742, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04330043749650742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678245, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678245 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2806451612903226, - "acc_stderr": 0.025560604721022884, - "acc_norm": 0.2806451612903226, - "acc_norm_stderr": 0.025560604721022884 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.27350427350427353, - "acc_stderr": 0.029202540153431194, - "acc_norm": 0.27350427350427353, - "acc_norm_stderr": 0.029202540153431194 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.0270087660907081, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 
0.0270087660907081 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.040693063197213775, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.040693063197213775 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371216, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371216 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23178807947019867, - "acc_stderr": 0.03445406271987054, - "acc_norm": 0.23178807947019867, - "acc_norm_stderr": 0.03445406271987054 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22885572139303484, - "acc_stderr": 0.029705284056772436, - "acc_norm": 0.22885572139303484, - "acc_norm_stderr": 0.029705284056772436 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.023445826276545546, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.023445826276545546 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.19444444444444445, - "acc_stderr": 0.0220213661002202, - "acc_norm": 0.19444444444444445, - "acc_norm_stderr": 0.0220213661002202 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24870466321243523, - "acc_stderr": 0.031195840877700293, - "acc_norm": 0.24870466321243523, - "acc_norm_stderr": 0.031195840877700293 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22568807339449543, - "acc_stderr": 0.01792308766780305, - "acc_norm": 0.22568807339449543, - "acc_norm_stderr": 0.01792308766780305 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.039325376803928724, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.039325376803928724 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.023929155517351284, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.023929155517351284 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - 
"harness|ko_mmlu_international_law|5": { - "acc": 0.19834710743801653, - "acc_stderr": 0.03640118271990945, - "acc_norm": 0.19834710743801653, - "acc_norm_stderr": 0.03640118271990945 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312337, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312337 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.01716058723504635, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.01716058723504635 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.20212765957446807, - "acc_stderr": 0.02395666823785024, - "acc_norm": 0.20212765957446807, - "acc_norm_stderr": 0.02395666823785024 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.043270409325787296, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.043270409325787296 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.03275773486100999, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.03275773486100999 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.22569832402234638, - "acc_stderr": 0.013981395058455059, - "acc_norm": 0.22569832402234638, - "acc_norm_stderr": 0.013981395058455059 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.25735294117647056, - "acc_stderr": 0.026556519470041513, - "acc_norm": 0.25735294117647056, - "acc_norm_stderr": 0.026556519470041513 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23673469387755103, - "acc_stderr": 0.027212835884073167, - "acc_norm": 0.23673469387755103, - "acc_norm_stderr": 0.027212835884073167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.27848101265822783, - "acc_stderr": 0.029178682304842534, - "acc_norm": 0.27848101265822783, - "acc_norm_stderr": 0.029178682304842534 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2438070404172099, - "acc_stderr": 0.010966507972178475, - "acc_norm": 0.2438070404172099, - "acc_norm_stderr": 0.010966507972178475 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2787878787878788, - "acc_stderr": 0.03501438706296781, - "acc_norm": 0.2787878787878788, - "acc_norm_stderr": 0.03501438706296781 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2423500611995104, - "mc1_stderr": 0.015000674373570338, - "mc2": 0.4144742012895836, - "mc2_stderr": 0.015299571868403075 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.07042253521126761, - "acc_stderr": 0.00877069161631731, - "acc_norm": 0.10093896713615023, - "acc_norm_stderr": 0.010326644717799555 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - 
"harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Nara-Lab/nallm-polyglot-ko-3.8b-base", - "model_sha": "8d20c1e3d77f2a9a58046b58fb229c809476d350", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json b/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json deleted file mode 100644 index 13b7fe6e3492b8978eb50b0cf37015c593882b38..0000000000000000000000000000000000000000 --- a/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2790102389078498, - "acc_stderr": 0.013106784883601348, - "acc_norm": 0.318259385665529, - "acc_norm_stderr": 0.013611993916971453 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3414658434574786, - "acc_stderr": 0.0047323221721537485, - "acc_norm": 0.41884086835291773, - "acc_norm_stderr": 0.004923609207861538 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602967, - "acc_norm": 
0.45614035087719296, - "acc_norm_stderr": 0.03820042586602967 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3933588761174968, - "acc_stderr": 0.017468556724503162, - "acc_norm": 0.3933588761174968, - "acc_norm_stderr": 0.017468556724503162 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.031158522131357797, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.031158522131357797 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.03571609230053481, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.03571609230053481 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3408360128617363, - "acc_stderr": 0.026920841260776162, - "acc_norm": 0.3408360128617363, - "acc_norm_stderr": 0.026920841260776162 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.032521134899291884, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.032521134899291884 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.37373737373737376, - "acc_stderr": 0.03446897738659333, - "acc_norm": 0.37373737373737376, - "acc_norm_stderr": 0.03446897738659333 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.03941707632064889, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.03941707632064889 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.0379328118530781, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.0379328118530781 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121633, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121633 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3871794871794872, - "acc_stderr": 0.024697216930878944, - "acc_norm": 0.3871794871794872, - "acc_norm_stderr": 0.024697216930878944 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04750077341199985, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04750077341199985 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - 
"harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3580645161290323, - "acc_stderr": 0.02727389059430064, - "acc_norm": 0.3580645161290323, - "acc_norm_stderr": 0.02727389059430064 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5555555555555556, - "acc_stderr": 0.03255326307272486, - "acc_norm": 0.5555555555555556, - "acc_norm_stderr": 0.03255326307272486 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33584905660377357, - "acc_stderr": 0.029067220146644826, - "acc_norm": 0.33584905660377357, - "acc_norm_stderr": 0.029067220146644826 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.04750185058907297, - "acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.04750185058907297 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.027309140588230193, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.027309140588230193 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.03297986648473834, - "acc_norm": 0.2052980132450331, - "acc_norm_stderr": 0.03297986648473834 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48756218905472637, - "acc_stderr": 0.035344398485395806, - "acc_norm": 0.48756218905472637, - "acc_norm_stderr": 0.035344398485395806 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.036146654241808254, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.036146654241808254 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.022644212615525218, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525218 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36127167630057805, - "acc_stderr": 0.02586220185227789, - "acc_norm": 0.36127167630057805, - "acc_norm_stderr": 0.02586220185227789 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.027163686038271215, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.027163686038271215 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46113989637305697, - "acc_stderr": 0.03597524411734578, - "acc_norm": 0.46113989637305697, - "acc_norm_stderr": 0.03597524411734578 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3688073394495413, - "acc_stderr": 0.020686227560729548, - "acc_norm": 0.3688073394495413, - "acc_norm_stderr": 0.020686227560729548 - }, - 
"harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.040735243221471255, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.040735243221471255 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4084967320261438, - "acc_stderr": 0.028146405993096358, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.028146405993096358 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5371900826446281, - "acc_stderr": 0.04551711196104218, - "acc_norm": 0.5371900826446281, - "acc_norm_stderr": 0.04551711196104218 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.03878139888797611, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.03878139888797611 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32516339869281047, - "acc_stderr": 0.01895088677080631, - "acc_norm": 0.32516339869281047, - "acc_norm_stderr": 0.01895088677080631 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.030998666304560538, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.030998666304560538 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098424, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098424 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3639705882352941, - "acc_stderr": 0.02922719246003203, - "acc_norm": 0.3639705882352941, - "acc_norm_stderr": 0.02922719246003203 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3673469387755102, - "acc_stderr": 0.030862144921087558, - "acc_norm": 0.3673469387755102, - "acc_norm_stderr": 0.030862144921087558 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.43037974683544306, - "acc_stderr": 0.03223017195937599, - "acc_norm": 0.43037974683544306, - "acc_norm_stderr": 0.03223017195937599 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771316, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771316 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.03393388584958406, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.03393388584958406 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.038517163193983954, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.038517163193983954 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2962056303549572, - "mc1_stderr": 0.015983595101811392, - "mc2": 0.4712238434154724, - "mc2_stderr": 0.016160223034293618 - }, - 
"harness|ko_commongen_v2|2": { - "acc": 0.3215962441314554, - "acc_stderr": 0.01601160345597796, - "acc_norm": 0.3544600938967136, - "acc_norm_stderr": 0.016397605788502075 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "NousResearch/Nous-Capybara-7B", - "model_sha": "49d96bb4659cad308f204ca946962cdcd8958ca6", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json b/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json deleted file mode 100644 index db3c6b73a94819a2902dee897fe2be26ac630cd8..0000000000000000000000000000000000000000 --- a/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json +++ /dev/null @@ 
-1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27559726962457337, - "acc_stderr": 0.013057169655761838, - "acc_norm": 0.3191126279863481, - "acc_norm_stderr": 0.013621696119173304 - }, - "harness|ko_hellaswag|10": { - "acc": 0.33718382792272455, - "acc_stderr": 0.004717820714968746, - "acc_norm": 0.416849233220474, - "acc_norm_stderr": 0.004920298437884909 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.045416094465039476, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.045416094465039476 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39208173690932313, - "acc_stderr": 0.017458524050147643, - "acc_norm": 0.39208173690932313, - "acc_norm_stderr": 0.017458524050147643 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977111, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977111 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.31063829787234043, - "acc_stderr": 0.03025123757921317, - "acc_norm": 0.31063829787234043, - "acc_norm_stderr": 0.03025123757921317 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.27710843373493976, - "acc_stderr": 0.03484331592680589, - "acc_norm": 0.27710843373493976, - "acc_norm_stderr": 0.03484331592680589 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3440514469453376, - "acc_stderr": 0.02698147804364803, - "acc_norm": 0.3440514469453376, - "acc_norm_stderr": 0.02698147804364803 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.336322869955157, - "acc_stderr": 0.03170882426845501, - "acc_norm": 0.336322869955157, - "acc_norm_stderr": 0.03170882426845501 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.0426073515764456, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.0426073515764456 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.32323232323232326, - "acc_stderr": 0.03332299921070645, - "acc_norm": 0.32323232323232326, - "acc_norm_stderr": 0.03332299921070645 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.039966295748767186, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.039966295748767186 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307808, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307808 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3277310924369748, - "acc_stderr": 0.030489911417673227, - "acc_norm": 0.3277310924369748, - "acc_norm_stderr": 0.030489911417673227 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.02345467488940429, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.02345467488940429 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - 
"harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3419354838709677, - "acc_stderr": 0.02698528957655274, - "acc_norm": 0.3419354838709677, - "acc_norm_stderr": 0.02698528957655274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49572649572649574, - "acc_stderr": 0.032754892643821316, - "acc_norm": 0.49572649572649574, - "acc_norm_stderr": 0.032754892643821316 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32075471698113206, - "acc_stderr": 0.028727502957880263, - "acc_norm": 0.32075471698113206, - "acc_norm_stderr": 0.028727502957880263 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.04461272175910507, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.04461272175910507 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145668, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145668 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2251655629139073, - "acc_stderr": 0.03410435282008937, - "acc_norm": 0.2251655629139073, - "acc_norm_stderr": 0.03410435282008937 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.472636815920398, - "acc_stderr": 0.03530235517334682, - "acc_norm": 0.472636815920398, - "acc_norm_stderr": 0.03530235517334682 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.034765996075164785, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.034765996075164785 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3554913294797688, - "acc_stderr": 0.025770292082977247, - "acc_norm": 0.3554913294797688, - "acc_norm_stderr": 0.025770292082977247 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.31901840490797545, - "acc_stderr": 0.03661997551073836, - "acc_norm": 0.31901840490797545, - "acc_norm_stderr": 0.03661997551073836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.02686949074481525, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.02686949074481525 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.37823834196891193, - 
"acc_stderr": 0.034998072761933376, - "acc_norm": 0.37823834196891193, - "acc_norm_stderr": 0.034998072761933376 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21929824561403508, - "acc_stderr": 0.03892431106518753, - "acc_norm": 0.21929824561403508, - "acc_norm_stderr": 0.03892431106518753 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3376146788990826, - "acc_stderr": 0.020275265986638903, - "acc_norm": 0.3376146788990826, - "acc_norm_stderr": 0.020275265986638903 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.042163702135578345, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.042163702135578345 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35947712418300654, - "acc_stderr": 0.027475969910660956, - "acc_norm": 0.35947712418300654, - "acc_norm_stderr": 0.027475969910660956 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5041322314049587, - "acc_stderr": 0.045641987674327526, - "acc_norm": 0.5041322314049587, - "acc_norm_stderr": 0.045641987674327526 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03583496176361061, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03583496176361061 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.018690850273595284, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.018690850273595284 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.02718712701150381, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.02718712701150381 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.04432804055291519, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.04432804055291519 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.033384734032074016, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.033384734032074016 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.014355911964767857, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.014355911964767857 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.40441176470588236, - "acc_stderr": 0.029812630701569743, - "acc_norm": 0.40441176470588236, - "acc_norm_stderr": 0.029812630701569743 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.47346938775510206, - "acc_stderr": 0.03196412734523272, - "acc_norm": 0.47346938775510206, - "acc_norm_stderr": 0.03196412734523272 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3459915611814346, - "acc_stderr": 0.03096481058878671, - "acc_norm": 0.3459915611814346, - "acc_norm_stderr": 0.03096481058878671 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26727509778357234, - "acc_stderr": 0.011302607515637513, - "acc_norm": 0.26727509778357234, - "acc_norm_stderr": 0.011302607515637513 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 
0.3137254901960784, - "acc_stderr": 0.03256685484460388, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.03256685484460388 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.03756335775187897, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.03756335775187897 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.31334149326805383, - "mc1_stderr": 0.016238065069059605, - "mc2": 0.48487503732289583, - "mc2_stderr": 0.015806306421646696 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2535211267605634, - "acc_stderr": 0.014912520668056985, - "acc_norm": 0.2934272300469484, - "acc_norm_stderr": 0.015608597269197893 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "NousResearch/Nous-Hermes-llama-2-7b", - "model_sha": "b7c3ec54b754175e006ef75696a2ba3802697078", - "model_dtype": 
"torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json b/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json deleted file mode 100644 index be2249b61ad923a3563803e7d87b9beb76d43912..0000000000000000000000000000000000000000 --- a/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2150170648464164, - "acc_stderr": 0.012005717634133611, - "acc_norm": 0.27474402730375425, - "acc_norm_stderr": 0.013044617212771227 - }, - "harness|ko_hellaswag|10": { - "acc": 0.25403306114319857, - "acc_stderr": 0.00434426617963492, - "acc_norm": 0.2605058753236407, - "acc_norm_stderr": 0.004380136468543937 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30409356725146197, - "acc_stderr": 0.03528211258245231, - "acc_norm": 0.30409356725146197, - "acc_norm_stderr": 0.03528211258245231 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.27330779054916987, - "acc_stderr": 0.015936681062628556, - "acc_norm": 0.27330779054916987, - "acc_norm_stderr": 0.015936681062628556 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.19574468085106383, - "acc_stderr": 0.025937853139977148, - "acc_norm": 0.19574468085106383, - "acc_norm_stderr": 0.025937853139977148 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.03070982405056527, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.03070982405056527 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788513, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788513 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.18834080717488788, - "acc_stderr": 0.026241132996407256, - "acc_norm": 0.18834080717488788, - "acc_norm_stderr": 0.026241132996407256 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.039417076320648906, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.039417076320648906 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24789915966386555, - 
"acc_stderr": 0.028047967224176892, - "acc_norm": 0.24789915966386555, - "acc_norm_stderr": 0.028047967224176892 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.023454674889404295, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.023454674889404295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868408, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868408 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.33225806451612905, - "acc_stderr": 0.026795560848122787, - "acc_norm": 0.33225806451612905, - "acc_norm_stderr": 0.026795560848122787 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24150943396226415, - "acc_stderr": 0.02634148037111834, - "acc_norm": 0.24150943396226415, - "acc_norm_stderr": 0.02634148037111834 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073828, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073828 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2804232804232804, - "acc_stderr": 0.02313528797432563, - "acc_norm": 0.2804232804232804, - "acc_norm_stderr": 0.02313528797432563 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566016, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566016 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.29190751445086704, - "acc_stderr": 0.02447699407624734, - "acc_norm": 0.29190751445086704, - "acc_norm_stderr": 0.02447699407624734 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 
0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.29012345679012347, - "acc_stderr": 0.02525117393649502, - "acc_norm": 0.29012345679012347, - "acc_norm_stderr": 0.02525117393649502 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.03027690994517826, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.03027690994517826 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21929824561403508, - "acc_stderr": 0.03892431106518753, - "acc_norm": 0.21929824561403508, - "acc_norm_stderr": 0.03892431106518753 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22568807339449543, - "acc_stderr": 0.01792308766780305, - "acc_norm": 0.22568807339449543, - "acc_norm_stderr": 0.01792308766780305 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3412698412698413, - "acc_stderr": 0.04240799327574925, - "acc_norm": 0.3412698412698413, - "acc_norm_stderr": 0.04240799327574925 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2581699346405229, - "acc_stderr": 0.02505850331695815, - "acc_norm": 0.2581699346405229, - "acc_norm_stderr": 0.02505850331695815 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.03690677986137283, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.03690677986137283 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.03952301967702511, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.03952301967702511 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.028963702570791013, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.028963702570791013 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932267, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932267 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.030187532060329387, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.030187532060329387 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, 
- "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658335, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658335 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2692307692307692, - "acc_stderr": 0.01132873440314031, - "acc_norm": 0.2692307692307692, - "acc_norm_stderr": 0.01132873440314031 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.031321798030832904, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.031321798030832904 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139406, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139406 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476196, - "mc2": 0.5244892940135847, - "mc2_stderr": 0.01678983173354145 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.08568075117370892, - "acc_stderr": 0.009594575928755997, - "acc_norm": 0.2992957746478873, - "acc_norm_stderr": 0.01569830927620494 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - 
"harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "OMK510/ko-llama2-toy", - "model_sha": "29cbd640d1c2e0be924fd88a27b3802e7523a390", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json b/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json deleted file mode 100644 index ba7e7d9aafc06a72b01c81ebf39a1e8806e61cff..0000000000000000000000000000000000000000 --- a/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.378839590443686, - "acc_stderr": 0.014175915490000324, - "acc_norm": 0.4300341296928328, - "acc_norm_stderr": 0.014467631559137998 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4086835291774547, - "acc_stderr": 0.004905859114942308, - "acc_norm": 0.5462059350726947, - "acc_norm_stderr": 0.0049684294763450345 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.038331852752130254, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.038331852752130254 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.46601941747572817, - "acc_stderr": 0.0493929144727348, - "acc_norm": 0.46601941747572817, - "acc_norm_stderr": 0.0493929144727348 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49808429118773945, - "acc_stderr": 0.017879832259026677, - "acc_norm": 0.49808429118773945, - "acc_norm_stderr": 0.017879832259026677 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.03141082197596239, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.03141082197596239 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4397590361445783, - "acc_stderr": 0.03864139923699122, - "acc_norm": 0.4397590361445783, - "acc_norm_stderr": 0.03864139923699122 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.02834504586484068, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.02834504586484068 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.03227790442850499, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.03227790442850499 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5202020202020202, - "acc_stderr": 0.03559443565563918, - "acc_norm": 0.5202020202020202, - "acc_norm_stderr": 0.03559443565563918 - }, - 
"harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.04104269211806232, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.04104269211806232 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.04440521906179327, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.04440521906179327 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.37815126050420167, - "acc_stderr": 0.031499305777849054, - "acc_norm": 0.37815126050420167, - "acc_norm_stderr": 0.031499305777849054 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.024359581465396955, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.024359581465396955 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.04732332615978813, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.04732332615978813 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.39408866995073893, - "acc_stderr": 0.034381579670365446, - "acc_norm": 0.39408866995073893, - "acc_norm_stderr": 0.034381579670365446 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45806451612903226, - "acc_stderr": 0.02834378725054064, - "acc_norm": 0.45806451612903226, - "acc_norm_stderr": 0.02834378725054064 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5854700854700855, - "acc_stderr": 0.03227396567623779, - "acc_norm": 0.5854700854700855, - "acc_norm_stderr": 0.03227396567623779 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4377358490566038, - "acc_stderr": 0.030533338430467512, - "acc_norm": 0.4377358490566038, - "acc_norm_stderr": 0.030533338430467512 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4636363636363636, - "acc_stderr": 0.04776449162396197, - "acc_norm": 0.4636363636363636, - "acc_norm_stderr": 0.04776449162396197 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073828, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073828 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.036430371689585496, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.036430371689585496 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29894179894179895, - "acc_stderr": 0.023577604791655795, - "acc_norm": 0.29894179894179895, - "acc_norm_stderr": 0.023577604791655795 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03899073687357335, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03899073687357335 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.42196531791907516, - "acc_stderr": 0.02658923114217426, - "acc_norm": 0.42196531791907516, - "acc_norm_stderr": 0.02658923114217426 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3496932515337423, - "acc_stderr": 0.03746668325470021, - "acc_norm": 0.3496932515337423, - "acc_norm_stderr": 0.03746668325470021 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.44135802469135804, - "acc_stderr": 0.027628737155668777, - "acc_norm": 0.44135802469135804, - "acc_norm_stderr": 0.027628737155668777 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46788990825688076, - "acc_stderr": 0.021393071222680814, - "acc_norm": 0.46788990825688076, - "acc_norm_stderr": 0.021393071222680814 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235173, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235173 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.02807415894760066, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.02807415894760066 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5289256198347108, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.5289256198347108, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.39473684210526316, - "acc_stderr": 0.03977749934622074, - "acc_norm": 0.39473684210526316, - "acc_norm_stderr": 0.03977749934622074 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.01885008469646872, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.01885008469646872 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.027640120545169945, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.027640120545169945 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.03085199299325701, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.03085199299325701 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - 
"harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.025187786660227276, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.025187786660227276 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.43673469387755104, - "acc_stderr": 0.031751952375833226, - "acc_norm": 0.43673469387755104, - "acc_norm_stderr": 0.031751952375833226 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4978902953586498, - "acc_stderr": 0.032546938018020076, - "acc_norm": 0.4978902953586498, - "acc_norm_stderr": 0.032546938018020076 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29595827900912647, - "acc_stderr": 0.011658518525277039, - "acc_norm": 0.29595827900912647, - "acc_norm_stderr": 0.011658518525277039 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4068627450980392, - "acc_stderr": 0.03447891136353382, - "acc_norm": 0.4068627450980392, - "acc_norm_stderr": 0.03447891136353382 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.03898531605579419, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.03898531605579419 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.0153218216884762, - "mc2": 0.4217472836360241, - "mc2_stderr": 0.014796357378387609 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.47417840375586856, - "acc_stderr": 0.017116907933735916, - "acc_norm": 0.596244131455399, - "acc_norm_stderr": 0.016819252969699622 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - 
"harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "OMK510/omk_mixed2", - "model_sha": "399afa9b7e7d9ecebfcb6b3fdc97b437e5529747", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json b/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json deleted file mode 100644 index 2fc208303ec9e0b75ac5e2e0304ad4296a6a8ff9..0000000000000000000000000000000000000000 --- a/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.33532423208191126, - "acc_stderr": 0.013796182947785559, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668526 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3732324238199562, - "acc_stderr": 0.004826746160830184, - "acc_norm": 0.48048197570205137, - "acc_norm_stderr": 0.004985978214937919 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602966, - "acc_norm": 0.45614035087719296, - "acc_norm_stderr": 0.03820042586602966 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5339805825242718, - "acc_stderr": 0.04939291447273481, - "acc_norm": 0.5339805825242718, - "acc_norm_stderr": 0.04939291447273481 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4827586206896552, - "acc_stderr": 0.017869330154003705, - "acc_norm": 0.4827586206896552, - "acc_norm_stderr": 0.017869330154003705 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.041716541613545426, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.041716541613545426 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4, - "acc_stderr": 0.03202563076101735, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03202563076101735 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4397590361445783, - "acc_stderr": 0.03864139923699122, - "acc_norm": 0.4397590361445783, - "acc_norm_stderr": 0.03864139923699122 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.47266881028938906, - "acc_stderr": 0.028355633568328188, - "acc_norm": 0.47266881028938906, - "acc_norm_stderr": 0.028355633568328188 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4618834080717489, - "acc_stderr": 0.033460150119732274, - "acc_norm": 0.4618834080717489, - "acc_norm_stderr": 0.033460150119732274 - }, - 
"harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5606060606060606, - "acc_stderr": 0.0353608594752948, - "acc_norm": 0.5606060606060606, - "acc_norm_stderr": 0.0353608594752948 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4413793103448276, - "acc_stderr": 0.04137931034482758, - "acc_norm": 0.4413793103448276, - "acc_norm_stderr": 0.04137931034482758 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.46218487394957986, - "acc_stderr": 0.032385469487589795, - "acc_norm": 0.46218487394957986, - "acc_norm_stderr": 0.032385469487589795 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.41025641025641024, - "acc_stderr": 0.024939313906940777, - "acc_norm": 0.41025641025641024, - "acc_norm_stderr": 0.024939313906940777 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.63, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.63, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3645320197044335, - "acc_stderr": 0.0338640574606209, - "acc_norm": 0.3645320197044335, - "acc_norm_stderr": 0.0338640574606209 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4290322580645161, - "acc_stderr": 0.02815603653823321, - "acc_norm": 0.4290322580645161, - "acc_norm_stderr": 0.02815603653823321 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7435897435897436, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.7435897435897436, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4490566037735849, - "acc_stderr": 0.030612730713641095, - "acc_norm": 0.4490566037735849, - "acc_norm_stderr": 0.030612730713641095 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.34444444444444444, - "acc_stderr": 0.02897264888484427, - "acc_norm": 0.34444444444444444, - "acc_norm_stderr": 0.02897264888484427 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.03692820767264867, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.03692820767264867 - }, - 
"harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.37566137566137564, - "acc_stderr": 0.024942368931159788, - "acc_norm": 0.37566137566137564, - "acc_norm_stderr": 0.024942368931159788 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3819444444444444, - "acc_stderr": 0.040629907841466674, - "acc_norm": 0.3819444444444444, - "acc_norm_stderr": 0.040629907841466674 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5028901734104047, - "acc_stderr": 0.02691864538323901, - "acc_norm": 0.5028901734104047, - "acc_norm_stderr": 0.02691864538323901 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4785276073619632, - "acc_stderr": 0.03924746876751129, - "acc_norm": 0.4785276073619632, - "acc_norm_stderr": 0.03924746876751129 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.5061728395061729, - "acc_stderr": 0.02781862396258329, - "acc_norm": 0.5061728395061729, - "acc_norm_stderr": 0.02781862396258329 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384487, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384487 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48256880733944957, - "acc_stderr": 0.02142429187185315, - "acc_norm": 0.48256880733944957, - "acc_norm_stderr": 0.02142429187185315 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04285714285714281, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04285714285714281 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.028555827516528787, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.028555827516528787 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336284, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336284 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3758169934640523, - "acc_stderr": 0.019594021136577447, - "acc_norm": 0.3758169934640523, - "acc_norm_stderr": 0.019594021136577447 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.33687943262411346, - "acc_stderr": 0.028195534873966727, - "acc_norm": 0.33687943262411346, - "acc_norm_stderr": 0.028195534873966727 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4732142857142857, - "acc_stderr": 0.047389751192741546, - "acc_norm": 0.4732142857142857, - "acc_norm_stderr": 0.047389751192741546 - }, - "harness|ko_mmlu_high_school_statistics|5": { - 
"acc": 0.3472222222222222, - "acc_stderr": 0.032468872436376486, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.032468872436376486 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.1877094972067039, - "acc_stderr": 0.013059605303257046, - "acc_norm": 0.1877094972067039, - "acc_norm_stderr": 0.013059605303257046 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3713235294117647, - "acc_stderr": 0.02934980313976587, - "acc_norm": 0.3713235294117647, - "acc_norm_stderr": 0.02934980313976587 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46938775510204084, - "acc_stderr": 0.031949171367580624, - "acc_norm": 0.46938775510204084, - "acc_norm_stderr": 0.031949171367580624 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131775, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131775 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3318122555410691, - "acc_stderr": 0.012026088259897639, - "acc_norm": 0.3318122555410691, - "acc_norm_stderr": 0.012026088259897639 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.03374499356319355, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.03374499356319355 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4303030303030303, - "acc_stderr": 0.03866225962879077, - "acc_norm": 0.4303030303030303, - "acc_norm_stderr": 0.03866225962879077 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.30966952264381886, - "mc1_stderr": 0.016185744355144895, - "mc2": 0.4995755882922268, - "mc2_stderr": 0.015845203460942626 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.32511737089201875, - "acc_stderr": 0.016057185777207574, - "acc_norm": 0.3392018779342723, - "acc_norm_stderr": 0.01622926097510445 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - 
"harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Open-Orca/Mistral-7B-SlimOrca", - "model_sha": "da461634dccd94d2da6a8de3b3cd84a527f60588", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json b/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json deleted file mode 100644 index 98a685cd4bc5db8521586958621ae2c4b0d2d121..0000000000000000000000000000000000000000 --- a/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.23464163822525597, - "acc_stderr": 0.012383873560768673, - "acc_norm": 0.26023890784982934, - "acc_norm_stderr": 0.01282193022511256 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2562238597888867, - "acc_stderr": 0.0043565471858470406, - "acc_norm": 0.2517426807408883, - "acc_norm_stderr": 0.004331271717773835 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.034240429246915824, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.034240429246915824 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.280970625798212, - "acc_stderr": 0.01607312785122124, - "acc_norm": 0.280970625798212, - "acc_norm_stderr": 0.01607312785122124 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621502, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621502 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28936170212765955, - "acc_stderr": 0.02964400657700962, - "acc_norm": 0.28936170212765955, - "acc_norm_stderr": 0.02964400657700962 - }, - 
"harness|ko_mmlu_virology|5": { - "acc": 0.26506024096385544, - "acc_stderr": 0.03436024037944966, - "acc_norm": 0.26506024096385544, - "acc_norm_stderr": 0.03436024037944966 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3004484304932735, - "acc_stderr": 0.03076935200822915, - "acc_norm": 0.3004484304932735, - "acc_norm_stderr": 0.03076935200822915 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.21717171717171718, - "acc_stderr": 0.02937661648494563, - "acc_norm": 0.21717171717171718, - "acc_norm_stderr": 0.02937661648494563 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.03618664819936245, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.03618664819936245 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.02738140692786897, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.02738140692786897 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.0270087660907081, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 0.0270087660907081 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.02564410863926762, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.02564410863926762 - }, - 
"harness|ko_mmlu_high_school_physics|5": { - "acc": 0.1986754966887417, - "acc_stderr": 0.03257847384436776, - "acc_norm": 0.1986754966887417, - "acc_norm_stderr": 0.03257847384436776 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.030965903123573026, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.030965903123573026 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.033687629322594316, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.033687629322594316 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.03396116205845335, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.03396116205845335 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.02425790170532338, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.02425790170532338 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.22699386503067484, - "acc_stderr": 0.03291099578615771, - "acc_norm": 0.22699386503067484, - "acc_norm_stderr": 0.03291099578615771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.023132376234543353, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.023132376234543353 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23669724770642203, - "acc_stderr": 0.018224078117299095, - "acc_norm": 0.23669724770642203, - "acc_norm_stderr": 0.018224078117299095 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333337, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333337 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.024170840879341016, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.024170840879341016 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.039418975265163025, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.039418975265163025 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.22875816993464052, - "acc_stderr": 0.016992723465466236, - "acc_norm": 0.22875816993464052, - "acc_norm_stderr": 0.016992723465466236 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180848, - "acc_norm": 0.25886524822695034, - "acc_norm_stderr": 0.026129572527180848 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755808, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755808 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3382352941176471, - "acc_stderr": 0.028739328513983566, - "acc_norm": 0.3382352941176471, - "acc_norm_stderr": 0.028739328513983566 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3459915611814346, - "acc_stderr": 0.03096481058878671, - "acc_norm": 0.3459915611814346, - "acc_norm_stderr": 0.03096481058878671 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25684485006518903, - "acc_stderr": 0.011158455853098844, - "acc_norm": 0.25684485006518903, - "acc_norm_stderr": 0.011158455853098844 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.031822318676475544, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.031822318676475544 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.03427743175816524, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.03427743175816524 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.01559475363200652, - "mc2": 0.4462776885774457, - "mc2_stderr": 0.01611369655251753 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.07511737089201878, - "acc_stderr": 0.009035427821270169, - "acc_norm": 0.3286384976525822, - "acc_norm_stderr": 0.016101734275119246 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 
1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "OpenBuddy/openbuddy-llama2-13b-v8.1-fp16", - "model_sha": "982a6b50fe0fa7e1bc3816d6d28484038e9daf29", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Teddysum/bllossom-1.0-13b/result_2023-10-02 03:06:22.json b/Teddysum/bllossom-1.0-13b/result_2023-10-02 03:06:22.json deleted file mode 100644 index 574525bf7c94dd84b99cbb061da9db1922e647c1..0000000000000000000000000000000000000000 --- a/Teddysum/bllossom-1.0-13b/result_2023-10-02 03:06:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32764505119453924, - "acc_stderr": 0.013715847940719346, - "acc_norm": 0.3890784982935154, - "acc_norm_stderr": 0.014247309976045605 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3777136028679546, - "acc_stderr": 0.0048382464107862766, - "acc_norm": 0.48894642501493724, - "acc_norm_stderr": 0.004988561944277391 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4678362573099415, - "acc_stderr": 0.03826882417660368, - "acc_norm": 0.4678362573099415, - "acc_norm_stderr": 0.03826882417660368 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5339805825242718, - "acc_stderr": 0.0493929144727348, - "acc_norm": 0.5339805825242718, - "acc_norm_stderr": 0.0493929144727348 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4227330779054917, - "acc_stderr": 0.017665180351954066, - "acc_norm": 0.4227330779054917, - "acc_norm_stderr": 0.017665180351954066 - }, - 
"harness|ko_mmlu_anatomy|5": { - "acc": 0.3925925925925926, - "acc_stderr": 0.042185062153688786, - "acc_norm": 0.3925925925925926, - "acc_norm_stderr": 0.042185062153688786 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412482, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412482 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370519, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370519 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4180064308681672, - "acc_stderr": 0.02801365189199507, - "acc_norm": 0.4180064308681672, - "acc_norm_stderr": 0.02801365189199507 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.35874439461883406, - "acc_stderr": 0.03219079200419996, - "acc_norm": 0.35874439461883406, - "acc_norm_stderr": 0.03219079200419996 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4580152671755725, - "acc_stderr": 0.04369802690578756, - "acc_norm": 0.4580152671755725, - "acc_norm_stderr": 0.04369802690578756 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.46464646464646464, - "acc_stderr": 0.03553436368828063, - "acc_norm": 0.46464646464646464, - "acc_norm_stderr": 0.03553436368828063 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.35172413793103446, - "acc_stderr": 0.0397923663749741, - "acc_norm": 0.35172413793103446, - "acc_norm_stderr": 0.0397923663749741 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.043898699568087785, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.043898699568087785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3319327731092437, - "acc_stderr": 0.030588697013783663, - "acc_norm": 0.3319327731092437, - "acc_norm_stderr": 0.030588697013783663 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3435897435897436, - "acc_stderr": 0.024078696580635463, - "acc_norm": 0.3435897435897436, - "acc_norm_stderr": 0.024078696580635463 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488584 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4039408866995074, - "acc_stderr": 0.0345245390382204, - "acc_norm": 0.4039408866995074, - "acc_norm_stderr": 0.0345245390382204 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3903225806451613, - "acc_stderr": 0.027751256636969573, - "acc_norm": 0.3903225806451613, - "acc_norm_stderr": 0.027751256636969573 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.44871794871794873, - "acc_stderr": 0.032583346493868806, - "acc_norm": 0.44871794871794873, - "acc_norm_stderr": 0.032583346493868806 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3283018867924528, - 
"acc_stderr": 0.028901593612411784, - "acc_norm": 0.3283018867924528, - "acc_norm_stderr": 0.028901593612411784 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.33636363636363636, - "acc_stderr": 0.04525393596302505, - "acc_norm": 0.33636363636363636, - "acc_norm_stderr": 0.04525393596302505 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.02708037281514566, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.02708037281514566 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48258706467661694, - "acc_stderr": 0.035333892347392454, - "acc_norm": 0.48258706467661694, - "acc_norm_stderr": 0.035333892347392454 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.035676037996391706, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.035676037996391706 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29894179894179895, - "acc_stderr": 0.023577604791655802, - "acc_norm": 0.29894179894179895, - "acc_norm_stderr": 0.023577604791655802 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.55, - "acc_stderr": 0.05, - "acc_norm": 0.55, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3670520231213873, - "acc_stderr": 0.025950054337654085, - "acc_norm": 0.3670520231213873, - "acc_norm_stderr": 0.025950054337654085 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.038890666191127216, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.038890666191127216 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.41975308641975306, - "acc_stderr": 0.027460099557005138, - "acc_norm": 0.41975308641975306, - "acc_norm_stderr": 0.027460099557005138 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3471502590673575, - "acc_stderr": 0.03435696168361355, - "acc_norm": 0.3471502590673575, - "acc_norm_stderr": 0.03435696168361355 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.03835153954399421, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.03835153954399421 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4018348623853211, - "acc_stderr": 0.02102010617299701, - "acc_norm": 0.4018348623853211, - "acc_norm_stderr": 0.02102010617299701 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.027530078447110307, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.027530078447110307 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 
0.047937248544110196 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.04507732278775087, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.04507732278775087 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.39473684210526316, - "acc_stderr": 0.039777499346220734, - "acc_norm": 0.39473684210526316, - "acc_norm_stderr": 0.039777499346220734 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.315359477124183, - "acc_stderr": 0.018798086284886897, - "acc_norm": 0.315359477124183, - "acc_norm_stderr": 0.018798086284886897 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755808, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755808 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.30092592592592593, - "acc_stderr": 0.031280390843298825, - "acc_norm": 0.30092592592592593, - "acc_norm_stderr": 0.031280390843298825 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098423, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098423 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.02439819298665492, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.02439819298665492 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4472573839662447, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.4472573839662447, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3070404172099087, - "acc_stderr": 0.01178095911451376, - "acc_norm": 0.3070404172099087, - "acc_norm_stderr": 0.01178095911451376 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.03471157907953426, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.03471157907953426 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.038956580652718446, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.038956580652718446 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23255813953488372, - "mc1_stderr": 0.01478915753108054, - "mc2": 0.40330820076202195, - "mc2_stderr": 0.01545475941668547 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3380281690140845, - "acc_stderr": 0.016215540194273195, - "acc_norm": 0.3873239436619718, - "acc_norm_stderr": 0.016698899332843718 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - 
"harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Teddysum/bllossom-1.0-13b", - "model_sha": "8d117668b35c61b1d0720a244531cf8123be0bc5", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json b/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json deleted file mode 100644 index 430334167e09e691f03ed0f5bad712d4069b6045..0000000000000000000000000000000000000000 --- a/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.013621696119173302, - "acc_norm": 0.37627986348122866, - "acc_norm_stderr": 0.014157022555407166 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36138219478191597, - "acc_stderr": 0.004794191785967945, - "acc_norm": 0.46614220274845647, - "acc_norm_stderr": 0.004978328190775522 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917055, - 
"acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917055 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4789272030651341, - "acc_stderr": 0.0178640767862129, - "acc_norm": 0.4789272030651341, - "acc_norm_stderr": 0.0178640767862129 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.39148936170212767, - "acc_stderr": 0.03190701242326812, - "acc_norm": 0.39148936170212767, - "acc_norm_stderr": 0.03190701242326812 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39759036144578314, - "acc_stderr": 0.038099730845402184, - "acc_norm": 0.39759036144578314, - "acc_norm_stderr": 0.038099730845402184 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4437299035369775, - "acc_stderr": 0.02821768355665232, - "acc_norm": 0.4437299035369775, - "acc_norm_stderr": 0.02821768355665232 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5419847328244275, - "acc_stderr": 0.04369802690578756, - "acc_norm": 0.5419847328244275, - "acc_norm_stderr": 0.04369802690578756 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.51010101010101, - "acc_stderr": 0.035616254886737454, - "acc_norm": 0.51010101010101, - "acc_norm_stderr": 0.035616254886737454 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4413793103448276, - "acc_stderr": 0.04137931034482758, - "acc_norm": 0.4413793103448276, - "acc_norm_stderr": 0.04137931034482758 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.03233943468182088, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.03233943468182088 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3974358974358974, - "acc_stderr": 0.024811920017903836, - "acc_norm": 0.3974358974358974, - "acc_norm_stderr": 0.024811920017903836 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5185185185185185, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.5185185185185185, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051449, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051449 - }, - 
"harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47419354838709676, - "acc_stderr": 0.028406095057653315, - "acc_norm": 0.47419354838709676, - "acc_norm_stderr": 0.028406095057653315 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5897435897435898, - "acc_stderr": 0.03222414045241107, - "acc_norm": 0.5897435897435898, - "acc_norm_stderr": 0.03222414045241107 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3849056603773585, - "acc_stderr": 0.029946498567699945, - "acc_norm": 0.3849056603773585, - "acc_norm_stderr": 0.029946498567699945 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.02592887613276611, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.02592887613276611 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5422885572139303, - "acc_stderr": 0.035228658640995975, - "acc_norm": 0.5422885572139303, - "acc_norm_stderr": 0.035228658640995975 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.036928207672648664, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.036928207672648664 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194978, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194978 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4595375722543353, - "acc_stderr": 0.02683080599895224, - "acc_norm": 0.4595375722543353, - "acc_norm_stderr": 0.02683080599895224 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4012345679012346, - "acc_stderr": 0.0272725828498398, - "acc_norm": 0.4012345679012346, - "acc_norm_stderr": 0.0272725828498398 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384486, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384486 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04434600701584926, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04434600701584926 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.44770642201834865, - "acc_stderr": 0.021319754962425462, - "acc_norm": 0.44770642201834865, - "acc_norm_stderr": 0.021319754962425462 - }, - 
"harness|ko_mmlu_formal_logic|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.028431095444176643, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.028431095444176643 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319774, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319774 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.48026315789473684, - "acc_stderr": 0.040657710025626036, - "acc_norm": 0.48026315789473684, - "acc_norm_stderr": 0.040657710025626036 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.31699346405228757, - "acc_stderr": 0.018824219512706207, - "acc_norm": 0.31699346405228757, - "acc_norm_stderr": 0.018824219512706207 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101373, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101373 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.03324708911809117, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.03324708911809117 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.375, - "acc_stderr": 0.029408372932278746, - "acc_norm": 0.375, - "acc_norm_stderr": 0.029408372932278746 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.43673469387755104, - "acc_stderr": 0.031751952375833226, - "acc_norm": 0.43673469387755104, - "acc_norm_stderr": 0.031751952375833226 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.39662447257383965, - "acc_stderr": 0.03184399873811226, - "acc_norm": 0.39662447257383965, - "acc_norm_stderr": 0.03184399873811226 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31681877444589307, - "acc_stderr": 0.011882349954723015, - "acc_norm": 0.31681877444589307, - "acc_norm_stderr": 0.011882349954723015 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.03441190023482466, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.03441190023482466 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.42424242424242425, - "acc_stderr": 0.038592681420702615, - "acc_norm": 0.42424242424242425, - "acc_norm_stderr": 0.038592681420702615 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24357405140758873, - "mc1_stderr": 0.015026354824910782, - "mc2": 0.41395274449910313, - "mc2_stderr": 0.015033140507060082 - }, - 
"harness|ko_commongen_v2|2": { - "acc": 0.28169014084507044, - "acc_stderr": 0.015419736669636544, - "acc_norm": 0.33568075117370894, - "acc_norm_stderr": 0.01618776374375696 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "TheBloke/Llama-2-13B-fp16", - "model_sha": "b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json b/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json deleted file mode 100644 index 4f4f6a1c9664f2c1da5a8808208bccbb0d89fd24..0000000000000000000000000000000000000000 --- a/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - 
"harness|ko_arc_challenge|25": { - "acc": 0.2790102389078498, - "acc_stderr": 0.013106784883601352, - "acc_norm": 0.3199658703071672, - "acc_norm_stderr": 0.013631345807016198 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3842859988050189, - "acc_stderr": 0.004854318994447741, - "acc_norm": 0.4954192391953794, - "acc_norm_stderr": 0.0049895720021966876 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03565079670708313, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03565079670708313 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.039166677628225836, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.039166677628225836 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2848020434227331, - "acc_stderr": 0.016139174096522563, - "acc_norm": 0.2848020434227331, - "acc_norm_stderr": 0.016139174096522563 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.03944624162501116, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.03944624162501116 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.02635515841334941, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.02635515841334941 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.03106939026078942, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.03106939026078942 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26688102893890675, - "acc_stderr": 0.025122637608816653, - "acc_norm": 0.26688102893890675, - "acc_norm_stderr": 0.025122637608816653 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2600896860986547, - "acc_stderr": 0.029442495585857473, - "acc_norm": 0.2600896860986547, - "acc_norm_stderr": 0.029442495585857473 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.1984732824427481, - "acc_stderr": 0.034981493854624686, - "acc_norm": 0.1984732824427481, - "acc_norm_stderr": 0.034981493854624686 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438014, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.03780019230438014 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149353, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149353 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - 
"acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04330043749650742, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04330043749650742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24516129032258063, - "acc_stderr": 0.024472243840895518, - "acc_norm": 0.24516129032258063, - "acc_norm_stderr": 0.024472243840895518 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.029480360549541198, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.029480360549541198 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24528301886792453, - "acc_stderr": 0.026480357179895688, - "acc_norm": 0.24528301886792453, - "acc_norm_stderr": 0.026480357179895688 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.033742355504256936, - "acc_norm": 0.2185430463576159, - "acc_norm_stderr": 0.033742355504256936 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916714, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916714 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483099, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483099 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29894179894179895, - "acc_stderr": 0.023577604791655805, - "acc_norm": 0.29894179894179895, - "acc_norm_stderr": 0.023577604791655805 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2745664739884393, - "acc_stderr": 0.02402774515526501, - "acc_norm": 0.2745664739884393, - "acc_norm_stderr": 0.02402774515526501 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924034, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924034 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.025702640260603767, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.025702640260603767 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 
0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23853211009174313, - "acc_stderr": 0.018272575810231857, - "acc_norm": 0.23853211009174313, - "acc_norm_stderr": 0.018272575810231857 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.033954900208561116, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.033954900208561116 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.26143790849673204, - "acc_stderr": 0.025160998214292456, - "acc_norm": 0.26143790849673204, - "acc_norm_stderr": 0.025160998214292456 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23026315789473684, - "acc_stderr": 0.03426059424403165, - "acc_norm": 0.23026315789473684, - "acc_norm_stderr": 0.03426059424403165 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2908496732026144, - "acc_stderr": 0.018373116915903966, - "acc_norm": 0.2908496732026144, - "acc_norm_stderr": 0.018373116915903966 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880582, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880582 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.028765111718046937, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.028765111718046937 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16911764705882354, - "acc_stderr": 0.022770868010113014, - "acc_norm": 0.16911764705882354, - "acc_norm_stderr": 0.022770868010113014 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2530612244897959, - "acc_stderr": 0.02783302387139968, - "acc_norm": 0.2530612244897959, - "acc_norm_stderr": 0.02783302387139968 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2869198312236287, - "acc_stderr": 0.029443773022594693, - "acc_norm": 0.2869198312236287, - "acc_norm_stderr": 0.029443773022594693 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24967405475880053, - "acc_stderr": 0.011054538377832318, - "acc_norm": 0.24967405475880053, - "acc_norm_stderr": 0.011054538377832318 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 
0.03149328104507957, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.03149328104507957 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.296969696969697, - "acc_stderr": 0.03567969772268047, - "acc_norm": 0.296969696969697, - "acc_norm_stderr": 0.03567969772268047 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.38771109052404834, - "mc2_stderr": 0.014784638195990142 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5140845070422535, - "acc_stderr": 0.017132977754804355, - "acc_norm": 0.6068075117370892, - "acc_norm_stderr": 0.016744157492949253 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "Trofish/KULLM-RLHF", - "model_sha": "857362e5fbb814aa76ed6aceb9314fc8ddbcdd63", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, 
- "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json b/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json deleted file mode 100644 index 14afc234ee739d6979efd9b1d88069c5da4f10d6..0000000000000000000000000000000000000000 --- a/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.40784982935153585, - "acc_stderr": 0.01436109728844971, - "acc_norm": 0.47013651877133106, - "acc_norm_stderr": 0.014585305840007105 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40728938458474406, - "acc_stderr": 0.00490325426417762, - "acc_norm": 0.5451105357498506, - "acc_norm_stderr": 0.004969431900874312 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.6213592233009708, - "acc_stderr": 0.04802694698258975, - "acc_norm": 0.6213592233009708, - "acc_norm_stderr": 0.04802694698258975 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5223499361430396, - "acc_stderr": 0.017862091778507852, - "acc_norm": 0.5223499361430396, - "acc_norm_stderr": 0.017862091778507852 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.043097329010363554, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.043097329010363554 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.0314108219759624, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.0314108219759624 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39759036144578314, - "acc_stderr": 0.038099730845402184, - "acc_norm": 0.39759036144578314, - "acc_norm_stderr": 0.038099730845402184 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4983922829581994, - "acc_stderr": 0.02839794490780661, - "acc_norm": 0.4983922829581994, - "acc_norm_stderr": 0.02839794490780661 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4125560538116592, - "acc_stderr": 0.03304062175449297, - "acc_norm": 0.4125560538116592, - "acc_norm_stderr": 0.03304062175449297 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5404040404040404, - "acc_stderr": 0.035507024651313425, - "acc_norm": 0.5404040404040404, - "acc_norm_stderr": 0.035507024651313425 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878152, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878152 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.46218487394957986, - "acc_stderr": 0.032385469487589795, - "acc_norm": 0.46218487394957986, - "acc_norm_stderr": 
0.032385469487589795 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.44358974358974357, - "acc_stderr": 0.02518914989476419, - "acc_norm": 0.44358974358974357, - "acc_norm_stderr": 0.02518914989476419 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5462962962962963, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.5462962962962963, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3891625615763547, - "acc_stderr": 0.034304624161038716, - "acc_norm": 0.3891625615763547, - "acc_norm_stderr": 0.034304624161038716 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4774193548387097, - "acc_stderr": 0.028414985019707868, - "acc_norm": 0.4774193548387097, - "acc_norm_stderr": 0.028414985019707868 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6666666666666666, - "acc_stderr": 0.030882736974138653, - "acc_norm": 0.6666666666666666, - "acc_norm_stderr": 0.030882736974138653 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.47547169811320755, - "acc_stderr": 0.030735822206205615, - "acc_norm": 0.47547169811320755, - "acc_norm_stderr": 0.030735822206205615 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.03802039760107903, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.03802039760107903 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6019900497512438, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.6019900497512438, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.43352601156069365, - "acc_stderr": 0.03778621079092055, - "acc_norm": 0.43352601156069365, - "acc_norm_stderr": 0.03778621079092055 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.02397386199899208, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.02397386199899208 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.039621355734862175, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.039621355734862175 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.67, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.67, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5086705202312138, - "acc_stderr": 0.0269150473553698, - "acc_norm": 0.5086705202312138, - "acc_norm_stderr": 0.0269150473553698 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4662576687116564, - "acc_stderr": 0.039194155450484096, - "acc_norm": 0.4662576687116564, - "acc_norm_stderr": 0.039194155450484096 - }, - 
"harness|ko_mmlu_prehistory|5": { - "acc": 0.47530864197530864, - "acc_stderr": 0.02778680093142745, - "acc_norm": 0.47530864197530864, - "acc_norm_stderr": 0.02778680093142745 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5284974093264249, - "acc_stderr": 0.03602573571288441, - "acc_norm": 0.5284974093264249, - "acc_norm_stderr": 0.03602573571288441 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5321100917431193, - "acc_stderr": 0.021393071222680814, - "acc_norm": 0.5321100917431193, - "acc_norm_stderr": 0.021393071222680814 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.0404061017820884, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.0404061017820884 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4477124183006536, - "acc_stderr": 0.02847293847803353, - "acc_norm": 0.4477124183006536, - "acc_norm_stderr": 0.02847293847803353 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6859504132231405, - "acc_stderr": 0.042369647530410184, - "acc_norm": 0.6859504132231405, - "acc_norm_stderr": 0.042369647530410184 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4473684210526316, - "acc_stderr": 0.040463368839782514, - "acc_norm": 0.4473684210526316, - "acc_norm_stderr": 0.040463368839782514 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.01911721391149515, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.01911721391149515 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35106382978723405, - "acc_stderr": 0.02847350127296377, - "acc_norm": 0.35106382978723405, - "acc_norm_stderr": 0.02847350127296377 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.03167468706828979, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.03167468706828979 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34191176470588236, - "acc_stderr": 0.028814722422254174, - "acc_norm": 0.34191176470588236, - "acc_norm_stderr": 0.028814722422254174 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.42448979591836733, - "acc_stderr": 0.031642094879429414, - "acc_norm": 0.42448979591836733, - "acc_norm_stderr": 0.031642094879429414 - }, - 
"harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31290743155149936, - "acc_stderr": 0.011842529823063, - "acc_norm": 0.31290743155149936, - "acc_norm_stderr": 0.011842529823063 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.47549019607843135, - "acc_stderr": 0.035050931943487976, - "acc_norm": 0.47549019607843135, - "acc_norm_stderr": 0.035050931943487976 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5212121212121212, - "acc_stderr": 0.03900828913737302, - "acc_norm": 0.5212121212121212, - "acc_norm_stderr": 0.03900828913737302 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2717258261933905, - "mc1_stderr": 0.015572840452875833, - "mc2": 0.4439993647512429, - "mc2_stderr": 0.014990045797851265 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.38380281690140844, - "acc_stderr": 0.016670520242941633, - "acc_norm": 0.43779342723004694, - "acc_norm_stderr": 0.017006611775152725 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 
1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/olaf-l.0.1", - "model_sha": "1fe9598f2ec7fe35ce77e773ef35b97b893b11d0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json b/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json deleted file mode 100644 index a1538ed601d92d9faa5f47ebb2a6710ac80d3076..0000000000000000000000000000000000000000 --- a/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29266211604095566, - "acc_stderr": 0.013295916103619413, - "acc_norm": 0.31399317406143346, - "acc_norm_stderr": 0.013562691224726291 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37004580760804623, - "acc_stderr": 0.00481829899101255, - "acc_norm": 0.47470623381796456, - "acc_norm_stderr": 0.004983392650570958 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.24271844660194175, - "acc_stderr": 0.04245022486384495, - "acc_norm": 0.24271844660194175, - "acc_norm_stderr": 0.04245022486384495 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20434227330779056, - "acc_stderr": 0.014419123980931906, - "acc_norm": 0.20434227330779056, - "acc_norm_stderr": 0.014419123980931906 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.22127659574468084, - "acc_stderr": 0.027136349602424063, - "acc_norm": 0.22127659574468084, - "acc_norm_stderr": 0.027136349602424063 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.22289156626506024, - "acc_stderr": 0.03240004825594688, - "acc_norm": 0.22289156626506024, - "acc_norm_stderr": 0.03240004825594688 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.11659192825112108, - "acc_stderr": 0.021539639816244464, - "acc_norm": 0.11659192825112108, - "acc_norm_stderr": 0.021539639816244464 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.03383201223244441, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.03383201223244441 - }, - 
"harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.18620689655172415, - "acc_stderr": 0.03243946159004616, - "acc_norm": 0.18620689655172415, - "acc_norm_stderr": 0.03243946159004616 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.04576665403207763, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.04576665403207763 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.02934457250063435, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.02934457250063435 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.358974358974359, - "acc_stderr": 0.024321738484602364, - "acc_norm": 0.358974358974359, - "acc_norm_stderr": 0.024321738484602364 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036625, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036625 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.030108330718011625, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.030108330718011625 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3258064516129032, - "acc_stderr": 0.0266620105785671, - "acc_norm": 0.3258064516129032, - "acc_norm_stderr": 0.0266620105785671 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.02749566368372406, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.02749566368372406 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721376, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721376 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073835, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073835 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3509933774834437, - "acc_stderr": 0.03896981964257374, - "acc_norm": 0.3509933774834437, - "acc_norm_stderr": 0.03896981964257374 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02256989707491842, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02256989707491842 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.26878612716763006, - "acc_stderr": 0.023868003262500114, - "acc_norm": 0.26878612716763006, - "acc_norm_stderr": 0.023868003262500114 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.03351953879521271, - "acc_norm": 0.2392638036809816, - "acc_norm_stderr": 0.03351953879521271 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.02346842983245115, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.02346842983245115 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436695, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436695 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3431192660550459, - "acc_stderr": 0.02035477773608604, - "acc_norm": 0.3431192660550459, - "acc_norm_stderr": 0.02035477773608604 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3412698412698413, - "acc_stderr": 0.04240799327574924, - "acc_norm": 0.3412698412698413, - "acc_norm_stderr": 0.04240799327574924 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2908496732026144, - "acc_stderr": 0.026004800363952113, - "acc_norm": 0.2908496732026144, - "acc_norm_stderr": 0.026004800363952113 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.17355371900826447, - "acc_stderr": 0.03457272836917669, - "acc_norm": 0.17355371900826447, - "acc_norm_stderr": 0.03457272836917669 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.0166848209291486, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.0166848209291486 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.20921985815602837, - "acc_stderr": 0.02426476943998849, - "acc_norm": 0.20921985815602837, - "acc_norm_stderr": 0.02426476943998849 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.19642857142857142, - "acc_stderr": 0.03770970049347018, - "acc_norm": 0.19642857142857142, - "acc_norm_stderr": 0.03770970049347018 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - 
"harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403325, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403325 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3836734693877551, - "acc_stderr": 0.03113088039623593, - "acc_norm": 0.3836734693877551, - "acc_norm_stderr": 0.03113088039623593 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25316455696202533, - "acc_stderr": 0.0283046579430353, - "acc_norm": 0.25316455696202533, - "acc_norm_stderr": 0.0283046579430353 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24641460234680573, - "acc_stderr": 0.011005971399927235, - "acc_norm": 0.24641460234680573, - "acc_norm_stderr": 0.011005971399927235 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.0347769116216366, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.0347769116216366 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752332, - "mc2": 0.39158327266747156, - "mc2_stderr": 0.014622481693781006 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3685446009389671, - "acc_stderr": 0.01653680430615456, - "acc_norm": 0.4706572769953052, - "acc_norm_stderr": 0.017110239257076235 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - 
"harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/polyglot-5.8B-CoT-e1", - "model_sha": "e8f4cb1d884cf4d67e3e8afc0aab09c62a0d68c6", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/small-instruct/result_2023-10-09 19:17:00.json b/amphora/small-instruct/result_2023-10-09 19:17:00.json deleted file mode 100644 index 29e29da727016b58901a050f3d5bffa6ae4a0a56..0000000000000000000000000000000000000000 --- a/amphora/small-instruct/result_2023-10-09 19:17:00.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2773037542662116, - "acc_stderr": 0.013082095839059374, - "acc_norm": 0.32764505119453924, - "acc_norm_stderr": 0.013715847940719346 - }, - "harness|ko_hellaswag|10": { - "acc": 0.34863572993427605, - "acc_stderr": 0.00475564501626385, - "acc_norm": 0.4313881696873133, - "acc_norm_stderr": 0.004942578520987342 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824565, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824565 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.03760178006026621, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.03760178006026621 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.01574549716904906, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.01574549716904906 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.040943762699967946, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.040943762699967946 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2851063829787234, - "acc_stderr": 0.029513196625539355, - "acc_norm": 0.2851063829787234, - "acc_norm_stderr": 0.029513196625539355 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3022508038585209, - "acc_stderr": 0.02608270069539966, - "acc_norm": 0.3022508038585209, - "acc_norm_stderr": 0.02608270069539966 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21973094170403587, - "acc_stderr": 0.0277901770643836, - "acc_norm": 0.21973094170403587, - "acc_norm_stderr": 0.0277901770643836 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 
0.2748091603053435, - "acc_stderr": 0.03915345408847835, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.03915345408847835 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124505, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124505 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2827586206896552, - "acc_stderr": 0.03752833958003336, - "acc_norm": 0.2827586206896552, - "acc_norm_stderr": 0.03752833958003336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2605042016806723, - "acc_stderr": 0.02851025151234191, - "acc_norm": 0.2605042016806723, - "acc_norm_stderr": 0.02851025151234191 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2, - "acc_stderr": 0.020280805062535722, - "acc_norm": 0.2, - "acc_norm_stderr": 0.020280805062535722 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.04524596007030048, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.04524596007030048 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2019704433497537, - "acc_stderr": 0.028247350122180267, - "acc_norm": 0.2019704433497537, - "acc_norm_stderr": 0.028247350122180267 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.267741935483871, - "acc_stderr": 0.02518900666021238, - "acc_norm": 0.267741935483871, - "acc_norm_stderr": 0.02518900666021238 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.27350427350427353, - "acc_stderr": 0.029202540153431183, - "acc_norm": 0.27350427350427353, - "acc_norm_stderr": 0.029202540153431183 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24528301886792453, - "acc_stderr": 0.026480357179895712, - "acc_norm": 0.24528301886792453, - "acc_norm_stderr": 0.026480357179895712 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.04122066502878285, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.04122066502878285 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712156, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712156 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.031157150869355575, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.031157150869355575 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.03345036916788991, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.03345036916788991 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - 
"acc_stderr": 0.022644212615525214, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525214 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.023786203255508283, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.023786203255508283 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2822085889570552, - "acc_stderr": 0.03536117886664743, - "acc_norm": 0.2822085889570552, - "acc_norm_stderr": 0.03536117886664743 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2716049382716049, - "acc_stderr": 0.02474862449053737, - "acc_norm": 0.2716049382716049, - "acc_norm_stderr": 0.02474862449053737 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24870466321243523, - "acc_stderr": 0.03119584087770029, - "acc_norm": 0.24870466321243523, - "acc_norm_stderr": 0.03119584087770029 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23853211009174313, - "acc_stderr": 0.018272575810231867, - "acc_norm": 0.23853211009174313, - "acc_norm_stderr": 0.018272575810231867 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1349206349206349, - "acc_stderr": 0.030557101589417515, - "acc_norm": 0.1349206349206349, - "acc_norm_stderr": 0.030557101589417515 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.238562091503268, - "acc_stderr": 0.024404394928087866, - "acc_norm": 0.238562091503268, - "acc_norm_stderr": 0.024404394928087866 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624503, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624503 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.26973684210526316, - "acc_stderr": 0.03611780560284898, - "acc_norm": 0.26973684210526316, - "acc_norm_stderr": 0.03611780560284898 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.01818521895431809, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.01818521895431809 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729903, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729903 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697623, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697623 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.22685185185185186, - "acc_stderr": 0.028561650102422273, - "acc_norm": 
0.22685185185185186, - "acc_norm_stderr": 0.028561650102422273 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098426, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098426 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.1875, - "acc_stderr": 0.023709788253811766, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.023709788253811766 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24489795918367346, - "acc_stderr": 0.027529637440174934, - "acc_norm": 0.24489795918367346, - "acc_norm_stderr": 0.027529637440174934 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24315514993481094, - "acc_stderr": 0.010956556654417353, - "acc_norm": 0.24315514993481094, - "acc_norm_stderr": 0.010956556654417353 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.03663974994391242, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.03663974994391242 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2423500611995104, - "mc1_stderr": 0.015000674373570342, - "mc2": 0.415216441138711, - "mc2_stderr": 0.015096025074072256 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.33215962441314556, - "acc_stderr": 0.016145257507387774, - "acc_norm": 0.40492957746478875, - "acc_norm_stderr": 0.01682709522397798 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/small-instruct", - "model_sha": "f7187ec82340f592a33ec4b22d02cfbc935886de", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/xllama-7b-0.3/result_2023-09-28 07:39:43.json b/amphora/xllama-7b-0.3/result_2023-09-28 07:39:43.json deleted file mode 100644 index 23f57dc92fb217fd077baf0898fc3f028de79b3c..0000000000000000000000000000000000000000 --- a/amphora/xllama-7b-0.3/result_2023-09-28 07:39:43.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20819112627986347, - "acc_stderr": 0.011864866118448069, - "acc_norm": 0.2508532423208191, - "acc_norm_stderr": 0.012668198621315433 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2610037841067516, - "acc_stderr": 0.004382844128643425, - "acc_norm": 0.26767576180043817, - "acc_norm_stderr": 0.004418427613296678 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2707535121328225, - "acc_stderr": 0.015889888362560486, - "acc_norm": 0.2707535121328225, - "acc_norm_stderr": 0.015889888362560486 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412482, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412482 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.23493975903614459, - "acc_stderr": 0.03300533186128922, - "acc_norm": 0.23493975903614459, - "acc_norm_stderr": 
0.03300533186128922 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2282958199356913, - "acc_stderr": 0.023839303311398195, - "acc_norm": 0.2282958199356913, - "acc_norm_stderr": 0.023839303311398195 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.25112107623318386, - "acc_stderr": 0.02910522083322462, - "acc_norm": 0.25112107623318386, - "acc_norm_stderr": 0.02910522083322462 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.1984732824427481, - "acc_stderr": 0.034981493854624714, - "acc_norm": 0.1984732824427481, - "acc_norm_stderr": 0.034981493854624714 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2620689655172414, - "acc_stderr": 0.036646663372252565, - "acc_norm": 0.2620689655172414, - "acc_norm_stderr": 0.036646663372252565 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2815126050420168, - "acc_stderr": 0.02921354941437216, - "acc_norm": 0.2815126050420168, - "acc_norm_stderr": 0.02921354941437216 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.022421273612923714, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.022421273612923714 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.31290322580645163, - "acc_stderr": 0.026377567028645858, - "acc_norm": 0.31290322580645163, - "acc_norm_stderr": 0.026377567028645858 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.025819233256483727, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483727 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23773584905660378, - "acc_stderr": 0.026199808807561918, - "acc_norm": 0.23773584905660378, - "acc_norm_stderr": 0.026199808807561918 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658754, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 
0.03822746937658754 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.208955223880597, - "acc_stderr": 0.028748298931728655, - "acc_norm": 0.208955223880597, - "acc_norm_stderr": 0.028748298931728655 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2623456790123457, - "acc_stderr": 0.024477222856135114, - "acc_norm": 0.2623456790123457, - "acc_norm_stderr": 0.024477222856135114 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089116, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089116 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24036697247706423, - "acc_stderr": 0.01832060732096407, - "acc_norm": 0.24036697247706423, - "acc_norm_stderr": 0.01832060732096407 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.03268454013011744, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.03268454013011744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.024288619466046105, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.024288619466046105 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.32231404958677684, - "acc_stderr": 0.04266416363352167, - "acc_norm": 0.32231404958677684, - "acc_norm_stderr": 0.04266416363352167 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.031103182383123363, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.031103182383123363 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.016639319350313264, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.016639319350313264 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729903, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729903 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.038342410214190714, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.038342410214190714 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.47685185185185186, - "acc_stderr": 0.03406315360711507, - "acc_norm": 0.47685185185185186, - "acc_norm_stderr": 0.03406315360711507 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225605, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225605 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2653061224489796, - "acc_stderr": 0.028263889943784603, - "acc_norm": 0.2653061224489796, - "acc_norm_stderr": 0.028263889943784603 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25316455696202533, - "acc_stderr": 0.02830465794303529, - "acc_norm": 0.25316455696202533, - "acc_norm_stderr": 0.02830465794303529 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2379400260756193, - "acc_stderr": 0.010875700787694238, - "acc_norm": 0.2379400260756193, - "acc_norm_stderr": 0.010875700787694238 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.03096451792692341, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.03096451792692341 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.03346409881055953, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.03346409881055953 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.015659605755326923, - "mc2": 0.4976972719721252, - "mc2_stderr": 0.01632084070201214 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.21009389671361503, - "acc_stderr": 0.013964637699696686, - "acc_norm": 0.4694835680751174, - "acc_norm_stderr": 0.017107826542381244 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/xllama-7b-0.3", - "model_sha": "a2c409d42ade00bb1e4dbd1815d94618a6fa23ed", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/xllama-7b-0.6/result_2023-09-29 01:20:14.json b/amphora/xllama-7b-0.6/result_2023-09-29 01:20:14.json deleted file mode 100644 index 399b764cff5e86268e5d4eea03c44163d7c46e34..0000000000000000000000000000000000000000 --- a/amphora/xllama-7b-0.6/result_2023-09-29 01:20:14.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20563139931740615, - "acc_stderr": 0.01181074526074258, - "acc_norm": 0.24061433447098976, - "acc_norm_stderr": 0.01249146853239057 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2604062935670185, - "acc_stderr": 0.004379594059141038, - "acc_norm": 0.26857199761003786, - "acc_norm_stderr": 0.004423109313298973 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26947637292464877, - "acc_stderr": 0.01586624307321506, - "acc_norm": 0.26947637292464877, - "acc_norm_stderr": 0.01586624307321506 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.03749850709174023, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.03749850709174023 - }, - 
"harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628834, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628834 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2282958199356913, - "acc_stderr": 0.023839303311398195, - "acc_norm": 0.2282958199356913, - "acc_norm_stderr": 0.023839303311398195 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.24663677130044842, - "acc_stderr": 0.028930413120910874, - "acc_norm": 0.24663677130044842, - "acc_norm_stderr": 0.028930413120910874 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22137404580152673, - "acc_stderr": 0.03641297081313729, - "acc_norm": 0.22137404580152673, - "acc_norm_stderr": 0.03641297081313729 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2689075630252101, - "acc_stderr": 0.028801392193631273, - "acc_norm": 0.2689075630252101, - "acc_norm_stderr": 0.028801392193631273 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2794871794871795, - "acc_stderr": 0.02275238883977683, - "acc_norm": 0.2794871794871795, - "acc_norm_stderr": 0.02275238883977683 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868407, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868407 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553883, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553883 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.025819233256483727, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483727 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2339622641509434, - "acc_stderr": 0.02605529690115292, - "acc_norm": 0.2339622641509434, - "acc_norm_stderr": 0.02605529690115292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 
0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.02708037281514567, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.02708037281514567 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.17412935323383086, - "acc_stderr": 0.026814951200421603, - "acc_norm": 0.17412935323383086, - "acc_norm_stderr": 0.026814951200421603 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.02456922360046085, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.02456922360046085 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32642487046632124, - "acc_stderr": 0.03384028621143294, - "acc_norm": 0.32642487046632124, - "acc_norm_stderr": 0.03384028621143294 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23302752293577983, - "acc_stderr": 0.018125669180861493, - "acc_norm": 0.23302752293577983, - "acc_norm_stderr": 0.018125669180861493 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.03268454013011744, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.03268454013011744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22875816993464052, - "acc_stderr": 0.024051029739912255, - "acc_norm": 0.22875816993464052, - "acc_norm_stderr": 0.024051029739912255 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.34710743801652894, - "acc_stderr": 0.04345724570292535, - 
"acc_norm": 0.34710743801652894, - "acc_norm_stderr": 0.04345724570292535 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.0306436070716771, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.0306436070716771 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.016639319350313264, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.016639319350313264 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729903, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729903 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.1875, - "acc_stderr": 0.0370468111477387, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.0370468111477387 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.47685185185185186, - "acc_stderr": 0.03406315360711507, - "acc_norm": 0.47685185185185186, - "acc_norm_stderr": 0.03406315360711507 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476786, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476786 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2571428571428571, - "acc_stderr": 0.027979823538744543, - "acc_norm": 0.2571428571428571, - "acc_norm_stderr": 0.027979823538744543 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24472573839662448, - "acc_stderr": 0.027985699387036416, - "acc_norm": 0.24472573839662448, - "acc_norm_stderr": 0.027985699387036416 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2379400260756193, - "acc_stderr": 0.010875700787694238, - "acc_norm": 0.2379400260756193, - "acc_norm_stderr": 0.010875700787694238 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.032876667586034886, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.032876667586034886 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006525, - "mc2": 0.5002435611916959, - "mc2_stderr": 0.016315855211312638 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2136150234741784, - "acc_stderr": 0.014049754012186283, - "acc_norm": 0.47183098591549294, - "acc_norm_stderr": 0.017112557035086604 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - 
"harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/xllama-7b-0.6", - "model_sha": "9b692aa866144b2573752d292706173b49aea3ac", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/amphora/xllama-mix/result_2023-09-29 15:29:28.json b/amphora/xllama-mix/result_2023-09-29 15:29:28.json deleted file mode 100644 index e028a20a1e38827e36d0151cbe9867250987b6fc..0000000000000000000000000000000000000000 --- a/amphora/xllama-mix/result_2023-09-29 15:29:28.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20392491467576793, - "acc_stderr": 0.011774262478702254, - "acc_norm": 0.24658703071672355, - "acc_norm_stderr": 0.012595726268790129 - }, - "harness|ko_hellaswag|10": { - "acc": 0.26309500099581756, - "acc_stderr": 0.004394136724173009, - "acc_norm": 0.2688707428799044, - "acc_norm_stderr": 0.004424664761480231 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21637426900584794, - "acc_stderr": 0.03158149539338734, - "acc_norm": 0.21637426900584794, - "acc_norm_stderr": 0.03158149539338734 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.04185832598928315, 
- "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.04185832598928315 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26947637292464877, - "acc_stderr": 0.01586624307321506, - "acc_norm": 0.26947637292464877, - "acc_norm_stderr": 0.01586624307321506 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.03547854198560826, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.03547854198560826 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.251063829787234, - "acc_stderr": 0.028346963777162452, - "acc_norm": 0.251063829787234, - "acc_norm_stderr": 0.028346963777162452 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25903614457831325, - "acc_stderr": 0.034106466140718564, - "acc_norm": 0.25903614457831325, - "acc_norm_stderr": 0.034106466140718564 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508762, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508762 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.038808483010823944, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.038808483010823944 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25757575757575757, - "acc_stderr": 0.031156269519646836, - "acc_norm": 0.25757575757575757, - "acc_norm_stderr": 0.031156269519646836 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617749, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617749 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.031041941304059274, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.031041941304059274 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.024283140529467295, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.024283140529467295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243839, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243839 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.026522709674667768, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 
0.026522709674667768 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.027008766090708083, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 0.027008766090708083 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085622, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085622 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.17412935323383086, - "acc_stderr": 0.026814951200421603, - "acc_norm": 0.17412935323383086, - "acc_norm_stderr": 0.026814951200421603 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.1907514450867052, - "acc_stderr": 0.029957851329869334, - "acc_norm": 0.1907514450867052, - "acc_norm_stderr": 0.029957851329869334 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113946, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113946 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.023083658586984204, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.023083658586984204 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02438366553103545, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02438366553103545 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3626943005181347, - "acc_stderr": 0.03469713791704372, - "acc_norm": 0.3626943005181347, - "acc_norm_stderr": 0.03469713791704372 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.0409698513984367, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.0409698513984367 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26605504587155965, - "acc_stderr": 0.01894602232222559, - "acc_norm": 0.26605504587155965, - "acc_norm_stderr": 0.01894602232222559 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.03809523809523812, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 
0.03809523809523812 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.024630048979824765, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.024630048979824765 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542126, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542126 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624503, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624503 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22695035460992907, - "acc_stderr": 0.02498710636564297, - "acc_norm": 0.22695035460992907, - "acc_norm_stderr": 0.02498710636564297 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.19642857142857142, - "acc_stderr": 0.03770970049347018, - "acc_norm": 0.19642857142857142, - "acc_norm_stderr": 0.03770970049347018 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2693877551020408, - "acc_stderr": 0.02840125202902294, - "acc_norm": 0.2693877551020408, - "acc_norm_stderr": 0.02840125202902294 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2489451476793249, - "acc_stderr": 0.028146970599422644, - "acc_norm": 0.2489451476793249, - "acc_norm_stderr": 0.028146970599422644 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23468057366362452, - "acc_stderr": 0.010824026872449342, - "acc_norm": 0.23468057366362452, - "acc_norm_stderr": 0.010824026872449342 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591361, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591361 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.031922715695482995, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.031922715695482995 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4927495486615087, - "mc2_stderr": 0.016224378220105643 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.19131455399061034, - "acc_stderr": 0.013483388196318597, - "acc_norm": 0.42370892018779344, - "acc_norm_stderr": 
0.016939085971158445 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "amphora/xllama-mix", - "model_sha": "0f6f3e17cb54b92a4817eea15cf566dec29fe89e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json b/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json deleted file mode 100644 index 048f1e94f8215eb24560a468e6f627d9af12b95b..0000000000000000000000000000000000000000 --- a/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.31569965870307165, - "acc_stderr": 0.013582571095815291, - "acc_norm": 0.3438566552901024, - 
"acc_norm_stderr": 0.013880644570156208 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3873730332603067, - "acc_stderr": 0.004861544478451863, - "acc_norm": 0.4980083648675563, - "acc_norm_stderr": 0.004989741826250387 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.034462962170884265, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.034462962170884265 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.22330097087378642, - "acc_stderr": 0.04123553189891431, - "acc_norm": 0.22330097087378642, - "acc_norm_stderr": 0.04123553189891431 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26947637292464877, - "acc_stderr": 0.01586624307321505, - "acc_norm": 0.26947637292464877, - "acc_norm_stderr": 0.01586624307321505 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.03712537833614867, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.03712537833614867 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628827, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628827 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.033293941190735296, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.033293941190735296 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3022508038585209, - "acc_stderr": 0.02608270069539966, - "acc_norm": 0.3022508038585209, - "acc_norm_stderr": 0.02608270069539966 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.20179372197309417, - "acc_stderr": 0.026936111912802277, - "acc_norm": 0.20179372197309417, - "acc_norm_stderr": 0.026936111912802277 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365897, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365897 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.21379310344827587, - "acc_stderr": 0.03416520447747549, - "acc_norm": 0.21379310344827587, - "acc_norm_stderr": 0.03416520447747549 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.036186648199362445, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.036186648199362445 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.22268907563025211, - "acc_stderr": 0.027025433498882374, - "acc_norm": 0.22268907563025211, - "acc_norm_stderr": 0.027025433498882374 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2, - "acc_stderr": 0.020280805062535722, - "acc_norm": 0.2, - "acc_norm_stderr": 0.020280805062535722 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 
0.2037037037037037, - "acc_stderr": 0.03893542518824847, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.03893542518824847 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678243, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678243 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24838709677419354, - "acc_stderr": 0.02458002892148101, - "acc_norm": 0.24838709677419354, - "acc_norm_stderr": 0.02458002892148101 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3034188034188034, - "acc_stderr": 0.030118210106942645, - "acc_norm": 0.3034188034188034, - "acc_norm_stderr": 0.030118210106942645 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2, - "acc_stderr": 0.02461829819586651, - "acc_norm": 0.2, - "acc_norm_stderr": 0.02461829819586651 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.041723430387053825, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.041723430387053825 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.03014777593540922, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.03014777593540922 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.022261817692400168, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.022261817692400168 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.037161774375660164, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.037161774375660164 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.26878612716763006, - "acc_stderr": 0.02386800326250011, - "acc_norm": 0.26878612716763006, - "acc_norm_stderr": 0.02386800326250011 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.02517104191530968, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.02517104191530968 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24870466321243523, - "acc_stderr": 0.031195840877700286, - "acc_norm": 0.24870466321243523, - "acc_norm_stderr": 0.031195840877700286 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 
0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23669724770642203, - "acc_stderr": 0.01822407811729908, - "acc_norm": 0.23669724770642203, - "acc_norm_stderr": 0.01822407811729908 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.034550710191021496, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.034550710191021496 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.0249541843248799, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.0249541843248799 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2809917355371901, - "acc_stderr": 0.04103203830514512, - "acc_norm": 0.2809917355371901, - "acc_norm_stderr": 0.04103203830514512 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03459777606810535, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03459777606810535 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.26633986928104575, - "acc_stderr": 0.017883188134667192, - "acc_norm": 0.26633986928104575, - "acc_norm_stderr": 0.017883188134667192 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093936, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093936 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2748603351955307, - "acc_stderr": 0.014931316703220513, - "acc_norm": 0.2748603351955307, - "acc_norm_stderr": 0.014931316703220513 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.02315746830855938, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.02315746830855938 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29957805907172996, - "acc_stderr": 0.029818024749753102, - "acc_norm": 0.29957805907172996, - "acc_norm_stderr": 0.029818024749753102 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25684485006518903, - "acc_stderr": 0.011158455853098862, - "acc_norm": 0.25684485006518903, - "acc_norm_stderr": 0.011158455853098862 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - 
"acc_stderr": 0.0340150671524904, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.0340150671524904 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.4196185756093357, - "mc2_stderr": 0.01602551288494906 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.43309859154929575, - "acc_stderr": 0.016985657928418076, - "acc_norm": 0.4788732394366197, - "acc_norm_stderr": 0.017124472080967058 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "beomi/KoAlpaca-Polyglot-12.8B", - "model_sha": "5f225e9c5ae6c7238fc2316da0b8a9922019674d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 
09:56:49.json b/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json deleted file mode 100644 index 4841da9c78a6a576a9246f41e159a19654cd4912..0000000000000000000000000000000000000000 --- a/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2593856655290102, - "acc_stderr": 0.012808273573927094, - "acc_norm": 0.3037542662116041, - "acc_norm_stderr": 0.01343890918477876 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3390758812985461, - "acc_stderr": 0.004724281487819373, - "acc_norm": 0.4146584345747859, - "acc_norm_stderr": 0.004916561213591286 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.03401052620104088, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.03401052620104088 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23243933588761176, - "acc_stderr": 0.015104550008905699, - "acc_norm": 0.23243933588761176, - "acc_norm_stderr": 0.015104550008905699 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.038201699145179055, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.038201699145179055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.23829787234042554, - "acc_stderr": 0.027851252973889788, - "acc_norm": 0.23829787234042554, - "acc_norm_stderr": 0.027851252973889788 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25903614457831325, - "acc_stderr": 0.034106466140718564, - "acc_norm": 0.25903614457831325, - "acc_norm_stderr": 0.034106466140718564 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.27009646302250806, - "acc_stderr": 0.025218040373410622, - "acc_norm": 0.27009646302250806, - "acc_norm_stderr": 0.025218040373410622 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2556053811659193, - "acc_stderr": 0.029275891003969927, - "acc_norm": 0.2556053811659193, - "acc_norm_stderr": 0.029275891003969927 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22137404580152673, - "acc_stderr": 0.0364129708131373, - "acc_norm": 0.22137404580152673, - "acc_norm_stderr": 0.0364129708131373 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909281, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909281 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.03074630074212451, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.03074630074212451 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.13725490196078433, - "acc_stderr": 0.034240846698915216, - "acc_norm": 0.13725490196078433, - "acc_norm_stderr": 0.034240846698915216 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31092436974789917, - "acc_stderr": 0.030066761582977934, - "acc_norm": 0.31092436974789917, - "acc_norm_stderr": 0.030066761582977934 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28974358974358977, - "acc_stderr": 
0.02300062824368796, - "acc_norm": 0.28974358974358977, - "acc_norm_stderr": 0.02300062824368796 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.038935425188248475, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.038935425188248475 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.030903796952114475, - "acc_norm": 0.26108374384236455, - "acc_norm_stderr": 0.030903796952114475 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.31290322580645163, - "acc_stderr": 0.02637756702864586, - "acc_norm": 0.31290322580645163, - "acc_norm_stderr": 0.02637756702864586 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24358974358974358, - "acc_stderr": 0.028120966503914418, - "acc_norm": 0.24358974358974358, - "acc_norm_stderr": 0.028120966503914418 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21132075471698114, - "acc_stderr": 0.025125766484827845, - "acc_norm": 0.21132075471698114, - "acc_norm_stderr": 0.025125766484827845 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.043091187099464585, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.043091187099464585 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23383084577114427, - "acc_stderr": 0.029929415408348384, - "acc_norm": 0.23383084577114427, - "acc_norm_stderr": 0.029929415408348384 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.28901734104046245, - "acc_stderr": 0.03456425745086998, - "acc_norm": 0.28901734104046245, - "acc_norm_stderr": 0.03456425745086998 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23544973544973544, - "acc_stderr": 0.021851509822031708, - "acc_norm": 0.23544973544973544, - "acc_norm_stderr": 0.021851509822031708 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.035146974678623884, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.035146974678623884 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403326, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403326 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.02344582627654555, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.02344582627654555 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.02492200116888632, - "acc_norm": 0.2777777777777778, - 
"acc_norm_stderr": 0.02492200116888632 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089116, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089116 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21834862385321102, - "acc_stderr": 0.01771260052872273, - "acc_norm": 0.21834862385321102, - "acc_norm_stderr": 0.01771260052872273 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2581699346405229, - "acc_stderr": 0.025058503316958157, - "acc_norm": 0.2581699346405229, - "acc_norm_stderr": 0.025058503316958157 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2231404958677686, - "acc_stderr": 0.03800754475228733, - "acc_norm": 0.2231404958677686, - "acc_norm_stderr": 0.03800754475228733 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23039215686274508, - "acc_stderr": 0.017035229258034038, - "acc_norm": 0.23039215686274508, - "acc_norm_stderr": 0.017035229258034038 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.025518731049537773, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.025518731049537773 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.03485946096475741, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.03485946096475741 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.27314814814814814, - "acc_stderr": 0.03038805130167812, - "acc_norm": 0.27314814814814814, - "acc_norm_stderr": 0.03038805130167812 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23128491620111732, - "acc_stderr": 0.0141022236231526, - "acc_norm": 0.23128491620111732, - "acc_norm_stderr": 0.0141022236231526 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2536764705882353, - "acc_stderr": 0.02643132987078953, - "acc_norm": 0.2536764705882353, - "acc_norm_stderr": 0.02643132987078953 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23673469387755103, - "acc_stderr": 0.027212835884073132, - "acc_norm": 0.23673469387755103, - "acc_norm_stderr": 0.027212835884073132 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.02875679962965834, - "acc_norm": 0.26582278481012656, - 
"acc_norm_stderr": 0.02875679962965834 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771316, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771316 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.030964517926923413, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.030964517926923413 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2350061199510404, - "mc1_stderr": 0.014843061507731613, - "mc2": 0.40043350315231013, - "mc2_stderr": 0.01604778937263507 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3744131455399061, - "acc_stderr": 0.016590312676984492, - "acc_norm": 0.42488262910798125, - "acc_norm_stderr": 0.016945248826821704 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, 
- "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "beomi/KoAlpaca-Polyglot-5.8B", - "model_sha": "cb1597cbaf4a98e52e6b767381a80893e4818477", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/beomi/kollama-13b/result_2023-09-26 17:41:30.json b/beomi/kollama-13b/result_2023-09-26 17:41:30.json deleted file mode 100644 index 6333460214c90acf93a0c0e21f5b3ca09cb6df29..0000000000000000000000000000000000000000 --- a/beomi/kollama-13b/result_2023-09-26 17:41:30.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.18430034129692832, - "acc_stderr": 0.011330517933037432, - "acc_norm": 0.24061433447098976, - "acc_norm_stderr": 0.012491468532390559 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2724556861183031, - "acc_stderr": 0.004443131632679339, - "acc_norm": 0.2983469428400717, - "acc_norm_stderr": 0.004565974937793705 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.0340105262010409, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.0340105262010409 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.015671006009339582, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.015671006009339582 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.02635515841334941, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.02635515841334941 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370518, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370518 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2861736334405145, - "acc_stderr": 0.025670259242188947, - "acc_norm": 0.2861736334405145, - "acc_norm_stderr": 0.025670259242188947 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508773, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508773 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932036, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932036 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.036001056927277716, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.036001056927277716 - }, - 
"harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3641025641025641, - "acc_stderr": 0.024396672985094778, - "acc_norm": 0.3641025641025641, - "acc_norm_stderr": 0.024396672985094778 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23773584905660378, - "acc_stderr": 0.02619980880756189, - "acc_norm": 0.23773584905660378, - "acc_norm_stderr": 0.02619980880756189 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23383084577114427, - "acc_stderr": 0.02992941540834839, - "acc_norm": 0.23383084577114427, - "acc_norm_stderr": 0.02992941540834839 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.03514697467862388, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.03514697467862388 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": 
{ - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2191358024691358, - "acc_stderr": 0.023016705640262185, - "acc_norm": 0.2191358024691358, - "acc_norm_stderr": 0.023016705640262185 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24036697247706423, - "acc_stderr": 0.01832060732096407, - "acc_norm": 0.24036697247706423, - "acc_norm_stderr": 0.01832060732096407 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604675, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604675 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24183006535947713, - "acc_stderr": 0.024518195641879334, - "acc_norm": 0.24183006535947713, - "acc_norm_stderr": 0.024518195641879334 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.039427724440366234, - "acc_norm": 0.19, - "acc_norm_stderr": 0.039427724440366234 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.039418975265163025, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.039418975265163025 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290396, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.0348594609647574, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.0348594609647574 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 
0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3877551020408163, - "acc_stderr": 0.03119223072679566, - "acc_norm": 0.3877551020408163, - "acc_norm_stderr": 0.03119223072679566 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2457627118644068, - "acc_stderr": 0.010996156635142692, - "acc_norm": 0.2457627118644068, - "acc_norm_stderr": 0.010996156635142692 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693257, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693257 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715021, - "mc2": 0.47018197225111685, - "mc2_stderr": 0.016150007373089376 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3204225352112676, - "acc_stderr": 0.01599617808862693, - "acc_norm": 0.5786384976525821, - "acc_norm_stderr": 0.01692646662043148 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - 
"harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "beomi/kollama-13b", - "model_sha": "d25ffb8c1a147e67c1bce0aca49a710395ce18ae", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json b/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json deleted file mode 100644 index 3efa51aa529448fe220efc2aa9c81ff2a0afabdc..0000000000000000000000000000000000000000 --- a/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3174061433447099, - "acc_stderr": 0.01360223908803817, - "acc_norm": 0.38054607508532423, - "acc_norm_stderr": 0.014188277712349814 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38309101772555265, - "acc_stderr": 0.004851466623601449, - "acc_norm": 0.4958175662218682, - "acc_norm_stderr": 0.00498960683837107 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.016905207420803554, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.016905207420803554 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3021276595744681, - "acc_stderr": 0.030017554471880557, - "acc_norm": 0.3021276595744681, - "acc_norm_stderr": 0.030017554471880557 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.03680783690727581, - "acc_norm": 0.3373493975903614, - "acc_norm_stderr": 0.03680783690727581 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - 
"acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4090909090909091, - "acc_stderr": 0.03502975799413007, - "acc_norm": 0.4090909090909091, - "acc_norm_stderr": 0.03502975799413007 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378949, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378949 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31092436974789917, - "acc_stderr": 0.030066761582977924, - "acc_norm": 0.31092436974789917, - "acc_norm_stderr": 0.030066761582977924 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2743589743589744, - "acc_stderr": 0.022622765767493197, - "acc_norm": 0.2743589743589744, - "acc_norm_stderr": 0.022622765767493197 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3032258064516129, - "acc_stderr": 0.026148685930671746, - "acc_norm": 0.3032258064516129, - "acc_norm_stderr": 0.026148685930671746 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36752136752136755, - "acc_stderr": 0.031585391577456365, - "acc_norm": 0.36752136752136755, - "acc_norm_stderr": 0.031585391577456365 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.35454545454545455, - "acc_stderr": 0.04582004841505416, - "acc_norm": 0.35454545454545455, - "acc_norm_stderr": 0.04582004841505416 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.026466117538959912, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.026466117538959912 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943342, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943342 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 
0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.026105673861409825, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.026105673861409825 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022596, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022596 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.027582811415159614, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.027582811415159614 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.018120224251484577, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.018120224251484577 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.032568505702936484, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.032568505702936484 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 
0.014288343803925295, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.014288343803925295 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35864978902953587, - "acc_stderr": 0.031219569445301833, - "acc_norm": 0.35864978902953587, - "acc_norm_stderr": 0.031219569445301833 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733096, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733096 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061663539899015, - "mc2_stderr": 0.014735219813379136 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31220657276995306, - "acc_stderr": 0.015884928030374876, - "acc_norm": 0.4964788732394366, - "acc_norm_stderr": 0.017139354240102787 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - 
"harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "beomi/llama-2-ko-7b", - "model_sha": "d79c4f41cdf9b78ce9fa100f687521df245c0e23", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json b/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json deleted file mode 100644 index ab55869015ce16874fc1dda9644a1ce96751642f..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32081911262798635, - "acc_stderr": 0.013640943091946528, - "acc_norm": 0.37627986348122866, - "acc_norm_stderr": 0.014157022555407166 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37094204341764586, - "acc_stderr": 0.004820697457420419, - "acc_norm": 0.47480581557458673, - "acc_norm_stderr": 0.0049834428886777705 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.39766081871345027, - "acc_stderr": 0.0375363895576169, - "acc_norm": 0.39766081871345027, - "acc_norm_stderr": 0.0375363895576169 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5242718446601942, - "acc_stderr": 0.049449010929737795, - "acc_norm": 0.5242718446601942, - "acc_norm_stderr": 0.049449010929737795 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4495530012771392, - "acc_stderr": 0.017788725283507337, - "acc_norm": 0.4495530012771392, - "acc_norm_stderr": 0.017788725283507337 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617723, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617723 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3659574468085106, - "acc_stderr": 0.0314895582974553, - "acc_norm": 0.3659574468085106, - "acc_norm_stderr": 0.0314895582974553 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.0374005938202932, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.0374005938202932 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4212218649517685, - "acc_stderr": 
0.028043399858210628, - "acc_norm": 0.4212218649517685, - "acc_norm_stderr": 0.028043399858210628 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3452914798206278, - "acc_stderr": 0.03191100192835794, - "acc_norm": 0.3452914798206278, - "acc_norm_stderr": 0.03191100192835794 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4696969696969697, - "acc_stderr": 0.03555804051763929, - "acc_norm": 0.4696969696969697, - "acc_norm_stderr": 0.03555804051763929 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.03941707632064889, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.03941707632064889 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.41596638655462187, - "acc_stderr": 0.03201650100739615, - "acc_norm": 0.41596638655462187, - "acc_norm_stderr": 0.03201650100739615 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.0242831405294673, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.0242831405294673 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907062, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907062 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.32019704433497537, - "acc_stderr": 0.032826493853041504, - "acc_norm": 0.32019704433497537, - "acc_norm_stderr": 0.032826493853041504 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4258064516129032, - "acc_stderr": 0.028129112709165897, - "acc_norm": 0.4258064516129032, - "acc_norm_stderr": 0.028129112709165897 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6324786324786325, - "acc_stderr": 0.031585391577456365, - "acc_norm": 0.6324786324786325, - "acc_norm_stderr": 0.031585391577456365 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.35471698113207545, - "acc_stderr": 0.029445175328199596, - "acc_norm": 0.35471698113207545, - "acc_norm_stderr": 0.029445175328199596 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.04724577405731571, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.04724577405731571 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.025787874220959316, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.025787874220959316 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658753, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.03822746937658753 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5223880597014925, - "acc_stderr": 
0.035319879302087305, - "acc_norm": 0.5223880597014925, - "acc_norm_stderr": 0.035319879302087305 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267437, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267437 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.291005291005291, - "acc_stderr": 0.023393826500484875, - "acc_norm": 0.291005291005291, - "acc_norm_stderr": 0.023393826500484875 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.48, - "acc_stderr": 0.05021167315686779, - "acc_norm": 0.48, - "acc_norm_stderr": 0.05021167315686779 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.02675625512966377, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.02675625512966377 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4104938271604938, - "acc_stderr": 0.027371350925124764, - "acc_norm": 0.4104938271604938, - "acc_norm_stderr": 0.027371350925124764 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46113989637305697, - "acc_stderr": 0.035975244117345775, - "acc_norm": 0.46113989637305697, - "acc_norm_stderr": 0.035975244117345775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.42385321100917434, - "acc_stderr": 0.021187263209087516, - "acc_norm": 0.42385321100917434, - "acc_norm_stderr": 0.021187263209087516 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.03809523809523811, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.03809523809523811 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4150326797385621, - "acc_stderr": 0.028213504177824093, - "acc_norm": 0.4150326797385621, - "acc_norm_stderr": 0.028213504177824093 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.512396694214876, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.512396694214876, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3815789473684211, - "acc_stderr": 0.039531733777491924, - "acc_norm": 0.3815789473684211, - "acc_norm_stderr": 0.039531733777491924 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.01920660684882537, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.01920660684882537 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35106382978723405, - "acc_stderr": 0.028473501272963764, - "acc_norm": 0.35106382978723405, - 
"acc_norm_stderr": 0.028473501272963764 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755805, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755805 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03214952147802749, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03214952147802749 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2581005586592179, - "acc_stderr": 0.014635185616527829, - "acc_norm": 0.2581005586592179, - "acc_norm_stderr": 0.014635185616527829 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35661764705882354, - "acc_stderr": 0.029097209568411952, - "acc_norm": 0.35661764705882354, - "acc_norm_stderr": 0.029097209568411952 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3224489795918367, - "acc_stderr": 0.029923100563683906, - "acc_norm": 0.3224489795918367, - "acc_norm_stderr": 0.029923100563683906 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6371308016877637, - "acc_stderr": 0.03129920825530213, - "acc_norm": 0.6371308016877637, - "acc_norm_stderr": 0.03129920825530213 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.34224250325945244, - "acc_stderr": 0.01211793999870587, - "acc_norm": 0.34224250325945244, - "acc_norm_stderr": 0.01211793999870587 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.0345423658538061, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.0345423658538061 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03888176921674099, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03888176921674099 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28151774785801714, - "mc1_stderr": 0.015744027248256055, - "mc2": 0.45994906823090903, - "mc2_stderr": 0.01581120469816343 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.26291079812206575, - "acc_stderr": 0.015090354985077728, - "acc_norm": 0.31338028169014087, - "acc_norm_stderr": 0.015901173963487666 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - 
"harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-3data-merged", - "model_sha": "7df44d1c021898b608f741519016e4fd1373e636", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json b/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json deleted file mode 100644 index 8fd44d4f5c1b5ffc8f57bcc1902ab5a8126df285..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2226962457337884, - "acc_stderr": 0.012158314774829919, - "acc_norm": 0.2960750853242321, - "acc_norm_stderr": 0.013340916085246254 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2795259908384784, - "acc_stderr": 0.004478491697891243, - "acc_norm": 0.30870344552877915, - "acc_norm_stderr": 0.004610143575553467 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.39766081871345027, - "acc_stderr": 0.03753638955761691, - "acc_norm": 0.39766081871345027, - "acc_norm_stderr": 0.03753638955761691 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.04721188506097173, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.04721188506097173 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.36909323116219667, - "acc_stderr": 0.01725628310912461, - "acc_norm": 0.36909323116219667, - "acc_norm_stderr": 0.01725628310912461 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - 
"acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.031068985963122155, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.031068985963122155 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.34726688102893893, - "acc_stderr": 0.027040745502307336, - "acc_norm": 0.34726688102893893, - "acc_norm_stderr": 0.027040745502307336 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484503, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484503 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467766, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467766 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.31313131313131315, - "acc_stderr": 0.03304205087813652, - "acc_norm": 0.31313131313131315, - "acc_norm_stderr": 0.03304205087813652 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378948, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378948 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617746, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617746 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.28991596638655465, - "acc_stderr": 0.02947248583313609, - "acc_norm": 0.28991596638655465, - "acc_norm_stderr": 0.02947248583313609 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3230769230769231, - "acc_stderr": 0.023710888501970565, - "acc_norm": 0.3230769230769231, - "acc_norm_stderr": 0.023710888501970565 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.53, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.53, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.04732332615978814, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.04732332615978814 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.22167487684729065, - "acc_stderr": 0.029225575892489614, - "acc_norm": 0.22167487684729065, - "acc_norm_stderr": 0.029225575892489614 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34838709677419355, - "acc_stderr": 0.02710482632810094, - "acc_norm": 0.34838709677419355, - "acc_norm_stderr": 0.02710482632810094 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5128205128205128, - "acc_stderr": 0.03274531938842351, - "acc_norm": 0.5128205128205128, - "acc_norm_stderr": 0.03274531938842351 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3169811320754717, - "acc_stderr": 0.028637235639800928, - "acc_norm": 0.3169811320754717, - "acc_norm_stderr": 0.028637235639800928 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.04750185058907297, - 
"acc_norm": 0.43636363636363634, - "acc_norm_stderr": 0.04750185058907297 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.026067159222275794, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.026067159222275794 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.036313298039696525, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.036313298039696525 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43781094527363185, - "acc_stderr": 0.0350808011219984, - "acc_norm": 0.43781094527363185, - "acc_norm_stderr": 0.0350808011219984 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.0356760379963917, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.0356760379963917 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.023636975996101813, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.023636975996101813 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.33815028901734107, - "acc_stderr": 0.02546977014940017, - "acc_norm": 0.33815028901734107, - "acc_norm_stderr": 0.02546977014940017 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3558282208588957, - "acc_stderr": 0.03761521380046734, - "acc_norm": 0.3558282208588957, - "acc_norm_stderr": 0.03761521380046734 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.02610567386140981, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.02610567386140981 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.35233160621761656, - "acc_stderr": 0.03447478286414358, - "acc_norm": 0.35233160621761656, - "acc_norm_stderr": 0.03447478286414358 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537315, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537315 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3798165137614679, - "acc_stderr": 0.020808825617866244, - "acc_norm": 0.3798165137614679, - "acc_norm_stderr": 0.020808825617866244 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.040735243221471255, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.040735243221471255 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.026716118380156837, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.026716118380156837 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.045454545454545456, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.045454545454545456 - 
}, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.03690677986137283, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.03690677986137283 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.01887568293806944, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.01887568293806944 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3482142857142857, - "acc_stderr": 0.04521829902833585, - "acc_norm": 0.3482142857142857, - "acc_norm_stderr": 0.04521829902833585 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093933, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093933 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2569832402234637, - "acc_stderr": 0.014614465821966351, - "acc_norm": 0.2569832402234637, - "acc_norm_stderr": 0.014614465821966351 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952365, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952365 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.029029422815681404, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.029029422815681404 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22040816326530613, - "acc_stderr": 0.026537045312145312, - "acc_norm": 0.22040816326530613, - "acc_norm_stderr": 0.026537045312145312 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2542372881355932, - "acc_stderr": 0.011121129007840676, - "acc_norm": 0.2542372881355932, - "acc_norm_stderr": 0.011121129007840676 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.032566854844603886, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.032566854844603886 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.40606060606060607, - "acc_stderr": 0.03834816355401181, - "acc_norm": 0.40606060606060607, - "acc_norm_stderr": 0.03834816355401181 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476189, - "mc2": 0.4874892521316813, - "mc2_stderr": 0.017011135502882097 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1936619718309859, - "acc_stderr": 0.013546152666107395, - "acc_norm": 0.3767605633802817, - "acc_norm_stderr": 0.016610985607291955 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 
1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-Openorca-cot-2157", - "model_sha": "eaf722c66f6bbb64f7f43d08bc9de3b36be29d2b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json b/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json deleted file mode 100644 index 13ec8c87ee62c03bdbe4f16ab85d906e19229bb0..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2901023890784983, - "acc_stderr": 0.01326157367752077, - "acc_norm": 0.34812286689419797, - "acc_norm_stderr": 0.013921008595179335 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35012945628360886, - "acc_stderr": 0.004760354191370866, - "acc_norm": 0.4374626568412667, - "acc_norm_stderr": 0.0049505983006675565 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4269005847953216, - "acc_stderr": 0.03793620616529917, - "acc_norm": 0.4269005847953216, - "acc_norm_stderr": 0.03793620616529917 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.39805825242718446, - "acc_stderr": 
0.0484674825397724, - "acc_norm": 0.39805825242718446, - "acc_norm_stderr": 0.0484674825397724 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.017612204084663775, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.017612204084663775 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.039446241625011175, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.039446241625011175 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.031245325202761926, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.031245325202761926 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.027648149599751464, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.027648149599751464 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.38565022421524664, - "acc_stderr": 0.03266842214289201, - "acc_norm": 0.38565022421524664, - "acc_norm_stderr": 0.03266842214289201 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.33587786259541985, - "acc_stderr": 0.041423137719966634, - "acc_norm": 0.33587786259541985, - "acc_norm_stderr": 0.041423137719966634 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.03394853965156402, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.03394853965156402 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309993, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309993 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.045766654032077636, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.045766654032077636 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31092436974789917, - "acc_stderr": 0.03006676158297793, - "acc_norm": 0.31092436974789917, - "acc_norm_stderr": 0.03006676158297793 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36666666666666664, - "acc_stderr": 0.024433016466052452, - "acc_norm": 0.36666666666666664, - "acc_norm_stderr": 0.024433016466052452 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956913, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956913 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.0478034362693679, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.0478034362693679 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3497536945812808, - "acc_stderr": 0.03355400904969565, - "acc_norm": 0.3497536945812808, - "acc_norm_stderr": 0.03355400904969565 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3935483870967742, - "acc_stderr": 0.027791878753132274, - "acc_norm": 0.3935483870967742, - 
"acc_norm_stderr": 0.027791878753132274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5427350427350427, - "acc_stderr": 0.03263622596380688, - "acc_norm": 0.5427350427350427, - "acc_norm_stderr": 0.03263622596380688 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.028254200344438662, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.028254200344438662 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3296296296296296, - "acc_stderr": 0.028661201116524586, - "acc_norm": 0.3296296296296296, - "acc_norm_stderr": 0.028661201116524586 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4427860696517413, - "acc_stderr": 0.03512310964123937, - "acc_norm": 0.4427860696517413, - "acc_norm_stderr": 0.03512310964123937 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.0356760379963917, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.0356760379963917 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.02479606060269995, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.02479606060269995 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.03773809990686935, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.03773809990686935 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.38439306358381503, - "acc_stderr": 0.026189666966272035, - "acc_norm": 0.38439306358381503, - "acc_norm_stderr": 0.026189666966272035 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.037311335196738925, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.037311335196738925 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.026571483480719967, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.026571483480719967 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38860103626943004, - "acc_stderr": 0.035177397963731316, - "acc_norm": 0.38860103626943004, - "acc_norm_stderr": 0.035177397963731316 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3908256880733945, - "acc_stderr": 0.020920058346111065, - "acc_norm": 0.3908256880733945, - "acc_norm_stderr": 0.020920058346111065 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.041905964388711366, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.041905964388711366 - }, - 
"harness|ko_mmlu_nutrition|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.027582811415159624, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.027582811415159624 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.48760330578512395, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.48760330578512395, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.03925523381052932, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.03925523381052932 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.01871806705262323, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.01871806705262323 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.0432704093257873, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.0432704093257873 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2824074074074074, - "acc_stderr": 0.030701372111510927, - "acc_norm": 0.2824074074074074, - "acc_norm_stderr": 0.030701372111510927 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.21564245810055865, - "acc_stderr": 0.013754835975482336, - "acc_norm": 0.21564245810055865, - "acc_norm_stderr": 0.013754835975482336 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33455882352941174, - "acc_stderr": 0.028661996202335307, - "acc_norm": 0.33455882352941174, - "acc_norm_stderr": 0.028661996202335307 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3224489795918367, - "acc_stderr": 0.029923100563683906, - "acc_norm": 0.3224489795918367, - "acc_norm_stderr": 0.029923100563683906 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4641350210970464, - "acc_stderr": 0.03246338898055659, - "acc_norm": 0.4641350210970464, - "acc_norm_stderr": 0.03246338898055659 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2907431551499348, - "acc_stderr": 0.011598062372851974, - "acc_norm": 0.2907431551499348, - "acc_norm_stderr": 0.011598062372851974 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.37745098039215685, - "acc_stderr": 0.03402272044340705, - "acc_norm": 0.37745098039215685, - "acc_norm_stderr": 0.03402272044340705 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.0381549430868893, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.0381549430868893 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29008567931456547, - "mc1_stderr": 0.01588623687420952, - "mc2": 0.4699106773315303, - "mc2_stderr": 0.01582978440702906 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3967136150234742, - "acc_stderr": 0.01677009546349845, - "acc_norm": 0.47417840375586856, - "acc_norm_stderr": 0.017116907933735912 - } - }, - 
"versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-eng-kor-cot-combined", - "model_sha": "d7e959c88fdc316602494d1ffd2bf52d33371f89", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json b/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json deleted file mode 100644 index ea6b14d51b1f792311513d35f77926c9302c01ae..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.31313993174061433, - "acc_stderr": 0.013552671543623497, - "acc_norm": 
0.3660409556313993, - "acc_norm_stderr": 0.014077223108470139 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37183827922724555, - "acc_stderr": 0.004823078145064963, - "acc_norm": 0.45947022505477, - "acc_norm_stderr": 0.00497336133916965 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.43859649122807015, - "acc_stderr": 0.03805797505590459, - "acc_norm": 0.43859649122807015, - "acc_norm_stderr": 0.03805797505590459 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5339805825242718, - "acc_stderr": 0.04939291447273482, - "acc_norm": 0.5339805825242718, - "acc_norm_stderr": 0.04939291447273482 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4367816091954023, - "acc_stderr": 0.017736470837800677, - "acc_norm": 0.4367816091954023, - "acc_norm_stderr": 0.017736470837800677 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.37349397590361444, - "acc_stderr": 0.037658451171688624, - "acc_norm": 0.37349397590361444, - "acc_norm_stderr": 0.037658451171688624 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4533762057877814, - "acc_stderr": 0.028274359854894245, - "acc_norm": 0.4533762057877814, - "acc_norm_stderr": 0.028274359854894245 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.36771300448430494, - "acc_stderr": 0.032361983509282766, - "acc_norm": 0.36771300448430494, - "acc_norm_stderr": 0.032361983509282766 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03547601494006937, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03547601494006937 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307808, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307808 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.0322529423239964, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.0322529423239964 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.39487179487179486, - "acc_stderr": 0.024784316942156374, - "acc_norm": 0.39487179487179486, - "acc_norm_stderr": 0.024784316942156374 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.58, - "acc_stderr": 0.04960449637488583, - "acc_norm": 0.58, - "acc_norm_stderr": 0.04960449637488583 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3842364532019704, - "acc_stderr": 0.03422398565657551, - "acc_norm": 0.3842364532019704, - "acc_norm_stderr": 0.03422398565657551 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.02820622559150274, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.02820622559150274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6196581196581197, - "acc_stderr": 0.03180425204384099, - "acc_norm": 0.6196581196581197, - "acc_norm_stderr": 0.03180425204384099 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3584905660377358, - "acc_stderr": 0.029514703583981755, - "acc_norm": 0.3584905660377358, - "acc_norm_stderr": 0.029514703583981755 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.028317533496066485, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.028317533496066485 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.037579499229433426, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.037579499229433426 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6019900497512438, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.6019900497512438, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.03533133389323657, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.03533133389323657 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.31216931216931215, - "acc_stderr": 0.02386520683697259, - "acc_norm": 0.31216931216931215, - "acc_norm_stderr": 0.02386520683697259 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4797687861271676, - "acc_stderr": 0.026897049996382875, - "acc_norm": 0.4797687861271676, - "acc_norm_stderr": 0.026897049996382875 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38271604938271603, - "acc_stderr": 0.02704453813840262, - "acc_norm": 0.38271604938271603, - "acc_norm_stderr": 0.02704453813840262 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46632124352331605, - "acc_stderr": 0.036002440698671784, - "acc_norm": 0.46632124352331605, - "acc_norm_stderr": 0.036002440698671784 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 
0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.43486238532110094, - "acc_stderr": 0.02125463146560928, - "acc_norm": 0.43486238532110094, - "acc_norm_stderr": 0.02125463146560928 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.040406101782088394, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.040406101782088394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.027956046165424516, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.027956046165424516 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03782728980865469, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03782728980865469 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.019393058402355442, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.019393058402355442 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.33687943262411346, - "acc_stderr": 0.02819553487396673, - "acc_norm": 0.33687943262411346, - "acc_norm_stderr": 0.02819553487396673 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.30614525139664805, - "acc_stderr": 0.01541449448790321, - "acc_norm": 0.30614525139664805, - "acc_norm_stderr": 0.01541449448790321 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.53, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.53, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3786764705882353, - "acc_stderr": 0.02946513363977613, - "acc_norm": 0.3786764705882353, - "acc_norm_stderr": 0.02946513363977613 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37551020408163266, - "acc_stderr": 0.03100120903989484, - "acc_norm": 0.37551020408163266, - "acc_norm_stderr": 0.03100120903989484 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.34224250325945244, - "acc_stderr": 0.012117939998705876, - "acc_norm": 0.34224250325945244, - "acc_norm_stderr": 0.012117939998705876 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3872549019607843, - "acc_stderr": 0.03418931233833344, - "acc_norm": 0.3872549019607843, - "acc_norm_stderr": 0.03418931233833344 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 
0.45454545454545453, - "acc_stderr": 0.038881769216741, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.038881769216741 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2937576499388005, - "mc1_stderr": 0.015945068581236618, - "mc2": 0.4769559005507783, - "mc2_stderr": 0.015879206203595765 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18896713615023475, - "acc_stderr": 0.013419847693240445, - "acc_norm": 0.2476525821596244, - "acc_norm_stderr": 0.014796734034366498 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-orca-1k-platy-1k", - "model_sha": "528d7bcaa2489daeea58946d17b341b55946f21b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git 
a/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json b/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json deleted file mode 100644 index e967f83964c955931223c62eced94ed92c1f71c0..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3267918088737201, - "acc_stderr": 0.013706665975587333, - "acc_norm": 0.3779863481228669, - "acc_norm_stderr": 0.014169664520303103 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3707428799044015, - "acc_stderr": 0.004820166002253069, - "acc_norm": 0.4790878311093408, - "acc_norm_stderr": 0.004985415250690911 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.03811079669833531, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.03811079669833531 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.04750458399041692, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.04750458399041692 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.38569604086845466, - "acc_stderr": 0.017406476619212904, - "acc_norm": 0.38569604086845466, - "acc_norm_stderr": 0.017406476619212904 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02850485647051418, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02850485647051418 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3313253012048193, - "acc_stderr": 0.03664314777288087, - "acc_norm": 0.3313253012048193, - "acc_norm_stderr": 0.03664314777288087 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3536977491961415, - "acc_stderr": 0.027155208103200865, - "acc_norm": 0.3536977491961415, - "acc_norm_stderr": 0.027155208103200865 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2556053811659193, - "acc_stderr": 0.029275891003969923, - "acc_norm": 0.2556053811659193, - "acc_norm_stderr": 0.029275891003969923 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462202, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462202 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.03394853965156403, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.03394853965156403 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419035, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419035 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.02959732973097809, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.02959732973097809 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.02281581309889661, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.02281581309889661 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04643454608906275, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04643454608906275 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3399014778325123, - "acc_stderr": 0.033327690684107895, - "acc_norm": 0.3399014778325123, - "acc_norm_stderr": 0.033327690684107895 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3580645161290323, - "acc_stderr": 0.027273890594300642, - "acc_norm": 0.3580645161290323, - "acc_norm_stderr": 0.027273890594300642 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.032485775115784, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.032485775115784 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2830188679245283, - "acc_stderr": 0.027724236492700907, - "acc_norm": 0.2830188679245283, - "acc_norm_stderr": 0.027724236492700907 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.04309118709946458, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.04309118709946458 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4228855721393035, - "acc_stderr": 0.03493231777421281, - "acc_norm": 0.4228855721393035, - "acc_norm_stderr": 0.03493231777421281 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.03476599607516478, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.03476599607516478 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.023636975996101803, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.023636975996101803 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566016, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566016 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.025722802200895813, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.025722802200895813 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924034, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924034 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 
0.38271604938271603, - "acc_stderr": 0.02704453813840262, - "acc_norm": 0.38271604938271603, - "acc_norm_stderr": 0.02704453813840262 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29015544041450775, - "acc_stderr": 0.03275264467791515, - "acc_norm": 0.29015544041450775, - "acc_norm_stderr": 0.03275264467791515 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3357798165137615, - "acc_stderr": 0.02024808139675293, - "acc_norm": 0.3357798165137615, - "acc_norm_stderr": 0.02024808139675293 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.31699346405228757, - "acc_stderr": 0.026643278474508758, - "acc_norm": 0.31699346405228757, - "acc_norm_stderr": 0.026643278474508758 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.045454545454545484, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.045454545454545484 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3815789473684211, - "acc_stderr": 0.03953173377749194, - "acc_norm": 0.3815789473684211, - "acc_norm_stderr": 0.03953173377749194 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2957516339869281, - "acc_stderr": 0.01846315413263281, - "acc_norm": 0.2957516339869281, - "acc_norm_stderr": 0.01846315413263281 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697624, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697624 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.030851992993257013, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.030851992993257013 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25139664804469275, - "acc_stderr": 0.01450897945355399, - "acc_norm": 0.25139664804469275, - "acc_norm_stderr": 0.01450897945355399 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.21323529411764705, - "acc_stderr": 0.024880971512294268, - "acc_norm": 0.21323529411764705, - "acc_norm_stderr": 0.024880971512294268 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.33877551020408164, - "acc_stderr": 0.03029950656215418, - "acc_norm": 0.33877551020408164, - "acc_norm_stderr": 0.03029950656215418 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 
0.41350210970464135, - "acc_stderr": 0.03205649904851859, - "acc_norm": 0.41350210970464135, - "acc_norm_stderr": 0.03205649904851859 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.288135593220339, - "acc_stderr": 0.011567140661324565, - "acc_norm": 0.288135593220339, - "acc_norm_stderr": 0.011567140661324565 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.03166009679399812, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.03166009679399812 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03681050869161549, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03681050869161549 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384304, - "mc2": 0.45737169951487844, - "mc2_stderr": 0.015829256462411827 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.39906103286384975, - "acc_stderr": 0.01678688302608549, - "acc_norm": 0.47417840375586856, - "acc_norm_stderr": 0.017116907933735916 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871", - "model_sha": "648931fc59553f86c011a4e312d6fc0ee93d4b37", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json b/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json deleted file mode 100644 index 52c4fe479268e54f3d67330669dba419f49d81e8..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.295221843003413, - "acc_stderr": 0.013329750293382316, - "acc_norm": 0.3430034129692833, - "acc_norm_stderr": 0.013872423223718167 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37054371639115713, - "acc_stderr": 0.0048196336688325495, - "acc_norm": 0.46345349531965746, - "acc_norm_stderr": 0.00497643438746997 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4152046783625731, - "acc_stderr": 0.037792759455032, - "acc_norm": 0.4152046783625731, - "acc_norm_stderr": 0.037792759455032 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4368932038834951, - "acc_stderr": 0.049111471073657764, - "acc_norm": 0.4368932038834951, - "acc_norm_stderr": 0.049111471073657764 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39719029374201786, - "acc_stderr": 0.01749790503715938, - "acc_norm": 0.39719029374201786, - "acc_norm_stderr": 0.01749790503715938 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.040943762699967946, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.040943762699967946 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628813, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628813 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3313253012048193, - "acc_stderr": 0.036643147772880864, - "acc_norm": 0.3313253012048193, - "acc_norm_stderr": 0.036643147772880864 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.42443729903536975, - "acc_stderr": 0.028071928247946205, - "acc_norm": 0.42443729903536975, - "acc_norm_stderr": 0.028071928247946205 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.336322869955157, - "acc_stderr": 0.031708824268455, - "acc_norm": 0.336322869955157, - "acc_norm_stderr": 0.031708824268455 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3435114503816794, - "acc_stderr": 0.041649760719448786, - "acc_norm": 0.3435114503816794, - "acc_norm_stderr": 0.041649760719448786 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.47474747474747475, - "acc_stderr": 0.03557806245087314, - "acc_norm": 0.47474747474747475, - "acc_norm_stderr": 0.03557806245087314 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 
0.31724137931034485, - "acc_stderr": 0.03878352372138623, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138623 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3907563025210084, - "acc_stderr": 0.031693802357129965, - "acc_norm": 0.3907563025210084, - "acc_norm_stderr": 0.031693802357129965 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.43333333333333335, - "acc_stderr": 0.02512465352588513, - "acc_norm": 0.43333333333333335, - "acc_norm_stderr": 0.02512465352588513 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.047500773411999854, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.047500773411999854 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3842364532019704, - "acc_stderr": 0.034223985656575515, - "acc_norm": 0.3842364532019704, - "acc_norm_stderr": 0.034223985656575515 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3870967741935484, - "acc_stderr": 0.027709359675032488, - "acc_norm": 0.3870967741935484, - "acc_norm_stderr": 0.027709359675032488 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5299145299145299, - "acc_stderr": 0.03269741106812443, - "acc_norm": 0.5299145299145299, - "acc_norm_stderr": 0.03269741106812443 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.37735849056603776, - "acc_stderr": 0.029832808114796005, - "acc_norm": 0.37735849056603776, - "acc_norm_stderr": 0.029832808114796005 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.04673752333670238, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.04673752333670238 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371218, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371218 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.25165562913907286, - "acc_stderr": 0.03543304234389985, - "acc_norm": 0.25165562913907286, - "acc_norm_stderr": 0.03543304234389985 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4626865671641791, - "acc_stderr": 0.035256751674679745, - "acc_norm": 0.4626865671641791, - "acc_norm_stderr": 0.035256751674679745 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.03681229633394319, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.03681229633394319 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.023266512213730578, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.023266512213730578 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.44, - 
"acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.025816756791584215, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.025816756791584215 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.41358024691358025, - "acc_stderr": 0.027402042040269952, - "acc_norm": 0.41358024691358025, - "acc_norm_stderr": 0.027402042040269952 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144809, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144809 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.41100917431192663, - "acc_stderr": 0.021095050687277638, - "acc_norm": 0.41100917431192663, - "acc_norm_stderr": 0.021095050687277638 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4084967320261438, - "acc_stderr": 0.028146405993096358, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.028146405993096358 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4297520661157025, - "acc_stderr": 0.04519082021319772, - "acc_norm": 0.4297520661157025, - "acc_norm_stderr": 0.04519082021319772 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849726, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849726 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.01840341571010979, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.01840341571010979 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.033247089118091176, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.033247089118091176 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2905027932960894, - "acc_stderr": 0.015183844307206157, - "acc_norm": 0.2905027932960894, - "acc_norm_stderr": 0.015183844307206157 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - 
"acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.40441176470588236, - "acc_stderr": 0.02981263070156974, - "acc_norm": 0.40441176470588236, - "acc_norm_stderr": 0.02981263070156974 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37142857142857144, - "acc_stderr": 0.03093285879278984, - "acc_norm": 0.37142857142857144, - "acc_norm_stderr": 0.03093285879278984 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4810126582278481, - "acc_stderr": 0.03252375148090447, - "acc_norm": 0.4810126582278481, - "acc_norm_stderr": 0.03252375148090447 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2966101694915254, - "acc_stderr": 0.011665946586082844, - "acc_norm": 0.2966101694915254, - "acc_norm_stderr": 0.011665946586082844 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.03393388584958403, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.03393388584958403 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4, - "acc_stderr": 0.03825460278380026, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03825460278380026 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502346, - "mc2": 0.449359001521154, - "mc2_stderr": 0.016084396495163696 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.22183098591549297, - "acc_stderr": 0.01424240369419945, - "acc_norm": 0.2734741784037559, - "acc_norm_stderr": 0.015279843644424517 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-orca-platy-2k-ep4", - "model_sha": "fd2682689d7efd4dd350d71f64a7a8ff09842fd7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json b/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json deleted file mode 100644 index 168122deff83dafba755f99f2d9b3537854faef2..0000000000000000000000000000000000000000 --- a/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.318259385665529, - "acc_stderr": 0.013611993916971451, - "acc_norm": 0.3583617747440273, - "acc_norm_stderr": 0.01401288333485986 - }, - "harness|ko_hellaswag|10": { - "acc": 0.368352917745469, - "acc_stderr": 0.00481371995282996, - "acc_norm": 0.46265684126667994, - "acc_norm_stderr": 0.0049758453350866195 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917055, - "acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917055 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4368932038834951, - "acc_stderr": 0.04911147107365777, - "acc_norm": 0.4368932038834951, - "acc_norm_stderr": 0.04911147107365777 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.44061302681992337, - "acc_stderr": 0.017753396973908486, - "acc_norm": 0.44061302681992337, - "acc_norm_stderr": 0.017753396973908486 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4, - "acc_stderr": 0.04232073695151589, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04232073695151589 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610334, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610334 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 0.0371172519074075, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.0371172519074075 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.43086816720257237, - "acc_stderr": 0.02812534098397271, - "acc_norm": 0.43086816720257237, - "acc_norm_stderr": 0.02812534098397271 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.03114679648297246, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.03114679648297246 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4351145038167939, - "acc_stderr": 0.04348208051644858, - "acc_norm": 
0.4351145038167939, - "acc_norm_stderr": 0.04348208051644858 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4898989898989899, - "acc_stderr": 0.035616254886737454, - "acc_norm": 0.4898989898989899, - "acc_norm_stderr": 0.035616254886737454 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.04043461861916747, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.04043461861916747 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.04440521906179327, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.04440521906179327 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.031124619309328177, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.031124619309328177 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.38974358974358975, - "acc_stderr": 0.024726967886647078, - "acc_norm": 0.38974358974358975, - "acc_norm_stderr": 0.024726967886647078 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.53, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.53, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.039427724440366234, - "acc_norm": 0.19, - "acc_norm_stderr": 0.039427724440366234 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04750077341199985, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04750077341199985 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.33497536945812806, - "acc_stderr": 0.033208527423483104, - "acc_norm": 0.33497536945812806, - "acc_norm_stderr": 0.033208527423483104 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.35161290322580646, - "acc_stderr": 0.027162537826948458, - "acc_norm": 0.35161290322580646, - "acc_norm_stderr": 0.027162537826948458 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6666666666666666, - "acc_stderr": 0.03088273697413866, - "acc_norm": 0.6666666666666666, - "acc_norm_stderr": 0.03088273697413866 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.35471698113207545, - "acc_stderr": 0.029445175328199596, - "acc_norm": 0.35471698113207545, - "acc_norm_stderr": 0.029445175328199596 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.027738969632176088, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.027738969632176088 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526732, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526732 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4626865671641791, - "acc_stderr": 0.03525675167467974, - "acc_norm": 0.4626865671641791, - "acc_norm_stderr": 0.03525675167467974 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.03614665424180826, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.03614665424180826 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.02413015829976262, - "acc_norm": 0.3253968253968254, - 
"acc_norm_stderr": 0.02413015829976262 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03899073687357335, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03899073687357335 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.025906632631016127, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.025906632631016127 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.03901591825836184, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.03901591825836184 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3734567901234568, - "acc_stderr": 0.02691500301138015, - "acc_norm": 0.3734567901234568, - "acc_norm_stderr": 0.02691500301138015 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.37305699481865284, - "acc_stderr": 0.03490205592048574, - "acc_norm": 0.37305699481865284, - "acc_norm_stderr": 0.03490205592048574 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.044346007015849245, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.044346007015849245 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.45688073394495415, - "acc_stderr": 0.021357458785226206, - "acc_norm": 0.45688073394495415, - "acc_norm_stderr": 0.021357458785226206 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3464052287581699, - "acc_stderr": 0.027245613047215362, - "acc_norm": 0.3464052287581699, - "acc_norm_stderr": 0.027245613047215362 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5206611570247934, - "acc_stderr": 0.04560456086387235, - "acc_norm": 0.5206611570247934, - "acc_norm_stderr": 0.04560456086387235 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.03878139888797611, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.03878139888797611 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.019117213911495175, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.019117213911495175 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.027187127011503803, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.027187127011503803 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, 
- "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.014355911964767867, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.014355911964767867 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.03000856284500348, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.03000856284500348 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3510204081632653, - "acc_stderr": 0.030555316755573637, - "acc_norm": 0.3510204081632653, - "acc_norm_stderr": 0.030555316755573637 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.45569620253164556, - "acc_stderr": 0.03241920684693334, - "acc_norm": 0.45569620253164556, - "acc_norm_stderr": 0.03241920684693334 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2835723598435463, - "acc_stderr": 0.011511900775968302, - "acc_norm": 0.2835723598435463, - "acc_norm_stderr": 0.011511900775968302 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4121212121212121, - "acc_stderr": 0.03843566993588717, - "acc_norm": 0.4121212121212121, - "acc_norm_stderr": 0.03843566993588717 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29008567931456547, - "mc1_stderr": 0.01588623687420952, - "mc2": 0.459471439183592, - "mc2_stderr": 0.016149154578981872 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18661971830985916, - "acc_stderr": 0.013355520080660188, - "acc_norm": 0.2147887323943662, - "acc_norm_stderr": 0.014077781780936443 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4", - "model_sha": "e3e91aad9d307bf43b516f95440a35a1db3e1c68", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json b/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json deleted file mode 100644 index c0d04b2efa785e8f8df5cd7c5cbca357e0efa4ee..0000000000000000000000000000000000000000 --- a/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29692832764505117, - "acc_stderr": 0.013352025976725222, - "acc_norm": 0.33447098976109213, - "acc_norm_stderr": 0.01378746032244138 - }, - "harness|ko_hellaswag|10": { - "acc": 0.345947022505477, - "acc_stderr": 0.004747038768172532, - "acc_norm": 0.4251145190201155, - "acc_norm_stderr": 0.004933500261683597 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4269005847953216, - "acc_stderr": 0.03793620616529916, - "acc_norm": 0.4269005847953216, - "acc_norm_stderr": 0.03793620616529916 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.047211885060971716, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.047211885060971716 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3895274584929757, - "acc_stderr": 0.017438082556264594, - "acc_norm": 0.3895274584929757, - "acc_norm_stderr": 0.017438082556264594 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.040247784019771124, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.040247784019771124 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3659574468085106, - "acc_stderr": 0.03148955829745529, - "acc_norm": 0.3659574468085106, - "acc_norm_stderr": 0.03148955829745529 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.29518072289156627, - "acc_stderr": 0.03550920185689629, - "acc_norm": 0.29518072289156627, - "acc_norm_stderr": 0.03550920185689629 - }, - 
"harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.02764814959975147, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.02764814959975147 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484503, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484503 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.04793724854411021, - "acc_norm": 0.35, - "acc_norm_stderr": 0.04793724854411021 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3724137931034483, - "acc_stderr": 0.040287315329475604, - "acc_norm": 0.3724137931034483, - "acc_norm_stderr": 0.040287315329475604 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.030684737115135377, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.030684737115135377 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.023454674889404288, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.023454674889404288 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.047128212574267705, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.047128212574267705 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.0314471258167824, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.0314471258167824 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3580645161290323, - "acc_stderr": 0.027273890594300642, - "acc_norm": 0.3580645161290323, - "acc_norm_stderr": 0.027273890594300642 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5128205128205128, - "acc_stderr": 0.032745319388423504, - "acc_norm": 0.5128205128205128, - "acc_norm_stderr": 0.032745319388423504 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.39090909090909093, - "acc_stderr": 0.04673752333670237, - "acc_norm": 0.39090909090909093, - "acc_norm_stderr": 0.04673752333670237 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.027738969632176095, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.027738969632176095 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - 
"harness|ko_mmlu_sociology|5": { - "acc": 0.48258706467661694, - "acc_stderr": 0.03533389234739245, - "acc_norm": 0.48258706467661694, - "acc_norm_stderr": 0.03533389234739245 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.035331333893236574, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.035331333893236574 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.0236369759961018, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.0236369759961018 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3439306358381503, - "acc_stderr": 0.025574123786546648, - "acc_norm": 0.3439306358381503, - "acc_norm_stderr": 0.025574123786546648 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.03731133519673893, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.03731133519673893 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38271604938271603, - "acc_stderr": 0.027044538138402616, - "acc_norm": 0.38271604938271603, - "acc_norm_stderr": 0.027044538138402616 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144808, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144808 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3412844036697248, - "acc_stderr": 0.020328612816592432, - "acc_norm": 0.3412844036697248, - "acc_norm_stderr": 0.020328612816592432 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.027956046165424516, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.027956046165424516 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.35526315789473684, - "acc_stderr": 0.03894734487013315, - "acc_norm": 0.35526315789473684, - "acc_norm_stderr": 0.03894734487013315 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.018433427649401896, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.018433427649401896 - }, - "harness|ko_mmlu_professional_accounting|5": { - 
"acc": 0.2765957446808511, - "acc_stderr": 0.026684564340460987, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460987 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.03350991604696043, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.03350991604696043 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.01444415780826145, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.01444415780826145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.028064998167040094, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.028064998167040094 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.47346938775510206, - "acc_stderr": 0.03196412734523272, - "acc_norm": 0.47346938775510206, - "acc_norm_stderr": 0.03196412734523272 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35443037974683544, - "acc_stderr": 0.031137304297185798, - "acc_norm": 0.35443037974683544, - "acc_norm_stderr": 0.031137304297185798 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2627118644067797, - "acc_stderr": 0.011240545514995669, - "acc_norm": 0.2627118644067797, - "acc_norm_stderr": 0.011240545514995669 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.032702871814820816, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.032702871814820816 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.03793713171165634, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.03793713171165634 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.33047735618115054, - "mc1_stderr": 0.016466769613698293, - "mc2": 0.5139753799906011, - "mc2_stderr": 0.016082624616035393 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.29225352112676056, - "acc_stderr": 0.015590281423747496, - "acc_norm": 0.32511737089201875, - "acc_norm_stderr": 0.016057185777207585 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - 
"harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "choco9966/Llama-2-7b-instruct-tuning", - "model_sha": "0914768714fca5e74eef736b357d9f82ccc9e089", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json b/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json deleted file mode 100644 index 9d44d584260ffff5c368f285e7df95b80d7c7d7f..0000000000000000000000000000000000000000 --- a/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.4087030716723549, - "acc_stderr": 0.014365750345427006, - "acc_norm": 0.44795221843003413, - "acc_norm_stderr": 0.01453201149821167 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4176458872734515, - "acc_stderr": 0.00492163264510238, - "acc_norm": 0.5456084445329615, - "acc_norm_stderr": 0.004968979259738337 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5029239766081871, - "acc_stderr": 0.03834759370936839, - "acc_norm": 0.5029239766081871, - "acc_norm_stderr": 0.03834759370936839 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5048543689320388, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.47381864623243936, - "acc_stderr": 0.017855434554041982, - "acc_norm": 0.47381864623243936, - "acc_norm_stderr": 0.017855434554041982 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.04256193767901407, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.04256193767901407 
- }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.0314108219759624, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.0314108219759624 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.03828401115079022, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.03828401115079022 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4758842443729904, - "acc_stderr": 0.02836504154256457, - "acc_norm": 0.4758842443729904, - "acc_norm_stderr": 0.02836504154256457 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4125560538116592, - "acc_stderr": 0.03304062175449297, - "acc_norm": 0.4125560538116592, - "acc_norm_stderr": 0.03304062175449297 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5114503816793893, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.5114503816793893, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5, - "acc_stderr": 0.035623524993954825, - "acc_norm": 0.5, - "acc_norm_stderr": 0.035623524993954825 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.046550104113196177, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.046550104113196177 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.032104790510157764, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.032104790510157764 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.41794871794871796, - "acc_stderr": 0.025007329882461227, - "acc_norm": 0.41794871794871796, - "acc_norm_stderr": 0.025007329882461227 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760627, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051448, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051448 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47096774193548385, - "acc_stderr": 0.028396016402761005, - "acc_norm": 0.47096774193548385, - "acc_norm_stderr": 0.028396016402761005 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6495726495726496, - "acc_stderr": 0.0312561082442188, - "acc_norm": 0.6495726495726496, - "acc_norm_stderr": 0.0312561082442188 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4867924528301887, - "acc_stderr": 0.030762134874500476, - "acc_norm": 0.4867924528301887, - "acc_norm_stderr": 0.030762134874500476 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 
0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526732, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526732 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.45664739884393063, - "acc_stderr": 0.03798106566014498, - "acc_norm": 0.45664739884393063, - "acc_norm_stderr": 0.03798106566014498 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.02413015829976262, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.02413015829976262 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3541666666666667, - "acc_stderr": 0.039994111357535424, - "acc_norm": 0.3541666666666667, - "acc_norm_stderr": 0.039994111357535424 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43641618497109824, - "acc_stderr": 0.026700545424943684, - "acc_norm": 0.43641618497109824, - "acc_norm_stderr": 0.026700545424943684 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4012345679012346, - "acc_stderr": 0.027272582849839796, - "acc_norm": 0.4012345679012346, - "acc_norm_stderr": 0.027272582849839796 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.036080032255696545, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.036080032255696545 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.0404933929774814, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.0404933929774814 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5211009174311927, - "acc_stderr": 0.021418224754264643, - "acc_norm": 0.5211009174311927, - "acc_norm_stderr": 0.021418224754264643 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795133, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795133 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46078431372549017, - "acc_stderr": 0.028541722692618874, - "acc_norm": 0.46078431372549017, - "acc_norm_stderr": 0.028541722692618874 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, 
- "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4605263157894737, - "acc_stderr": 0.04056242252249033, - "acc_norm": 0.4605263157894737, - "acc_norm_stderr": 0.04056242252249033 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.01948802574552967, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.01948802574552967 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.038342410214190714, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.038342410214190714 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 0.014288343803925295, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.014288343803925295 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.029896163033125474, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.029896163033125474 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5224489795918368, - "acc_stderr": 0.03197694118713672, - "acc_norm": 0.5224489795918368, - "acc_norm_stderr": 0.03197694118713672 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.459915611814346, - "acc_stderr": 0.03244246810187914, - "acc_norm": 0.459915611814346, - "acc_norm_stderr": 0.03244246810187914 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3135593220338983, - "acc_stderr": 0.011849234291459315, - "acc_norm": 0.3135593220338983, - "acc_norm_stderr": 0.011849234291459315 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4484848484848485, - "acc_stderr": 0.03883565977956929, - "acc_norm": 0.4484848484848485, - "acc_norm_stderr": 0.03883565977956929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2778457772337821, - "mc1_stderr": 0.015680929364024623, - "mc2": 0.4417936176466885, - "mc2_stderr": 0.015776414620892073 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4072769953051643, - "acc_stderr": 0.01684248043954552, - "acc_norm": 0.45539906103286387, - "acc_norm_stderr": 0.01707145266733428 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - 
"harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "etri-xainlp/llama2-ko-13b-instruct", - "model_sha": "5be30496ddc86d18eff1df9aab04e5c246fb2d86", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json b/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json deleted file mode 100644 index b8e26297012c75abece4fe28280a8b25354a73df..0000000000000000000000000000000000000000 --- a/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.31143344709897613, - "acc_stderr": 0.013532472099850947, - "acc_norm": 0.3464163822525597, - "acc_norm_stderr": 0.013905011180063247 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4026090420235013, - "acc_stderr": 0.0048942100113032105, - "acc_norm": 0.5198167695678152, - "acc_norm_stderr": 0.004985860853427639 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.29239766081871343, - "acc_stderr": 0.03488647713457922, - "acc_norm": 0.29239766081871343, - "acc_norm_stderr": 0.03488647713457922 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.21359223300970873, - 
"acc_stderr": 0.040580420156460344, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.040580420156460344 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2822477650063857, - "acc_stderr": 0.01609530296987857, - "acc_norm": 0.2822477650063857, - "acc_norm_stderr": 0.01609530296987857 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03820169914517905, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03820169914517905 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412483, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412483 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2289156626506024, - "acc_stderr": 0.03270745277352477, - "acc_norm": 0.2289156626506024, - "acc_norm_stderr": 0.03270745277352477 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.21864951768488747, - "acc_stderr": 0.023475581417861113, - "acc_norm": 0.21864951768488747, - "acc_norm_stderr": 0.023475581417861113 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.26905829596412556, - "acc_stderr": 0.02976377940687498, - "acc_norm": 0.26905829596412556, - "acc_norm_stderr": 0.02976377940687498 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932036, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932036 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378947, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378947 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.042801058373643966, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.042801058373643966 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868956, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868956 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23846153846153847, - "acc_stderr": 0.021606294494647727, - "acc_norm": 0.23846153846153847, - "acc_norm_stderr": 0.021606294494647727 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.04236511258094632, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.04236511258094632 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.029678333141444465, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.029678333141444465 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25483870967741934, - "acc_stderr": 0.024790118459332208, - "acc_norm": 
0.25483870967741934, - "acc_norm_stderr": 0.024790118459332208 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.29914529914529914, - "acc_stderr": 0.029996951858349476, - "acc_norm": 0.29914529914529914, - "acc_norm_stderr": 0.029996951858349476 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.02761116340239972, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.02761116340239972 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.041723430387053825, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.041723430387053825 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.026067159222275794, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.026067159222275794 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804723, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.03511807571804723 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22885572139303484, - "acc_stderr": 0.02970528405677244, - "acc_norm": 0.22885572139303484, - "acc_norm_stderr": 0.02970528405677244 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2804232804232804, - "acc_stderr": 0.02313528797432563, - "acc_norm": 0.2804232804232804, - "acc_norm_stderr": 0.02313528797432563 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3092485549132948, - "acc_stderr": 0.02488314057007176, - "acc_norm": 0.3092485549132948, - "acc_norm_stderr": 0.02488314057007176 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.29012345679012347, - "acc_stderr": 0.025251173936495026, - "acc_norm": 0.29012345679012347, - "acc_norm_stderr": 0.025251173936495026 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.21761658031088082, - "acc_stderr": 0.02977866303775296, - "acc_norm": 0.21761658031088082, - "acc_norm_stderr": 0.02977866303775296 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.20917431192660552, - "acc_stderr": 0.01743793717334323, - "acc_norm": 0.20917431192660552, - "acc_norm_stderr": 0.01743793717334323 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 
0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2908496732026144, - "acc_stderr": 0.026004800363952113, - "acc_norm": 0.2908496732026144, - "acc_norm_stderr": 0.026004800363952113 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2644628099173554, - "acc_stderr": 0.040261875275912046, - "acc_norm": 0.2644628099173554, - "acc_norm_stderr": 0.040261875275912046 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.35526315789473684, - "acc_stderr": 0.03894734487013317, - "acc_norm": 0.35526315789473684, - "acc_norm_stderr": 0.03894734487013317 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2679738562091503, - "acc_stderr": 0.017917974069594726, - "acc_norm": 0.2679738562091503, - "acc_norm_stderr": 0.017917974069594726 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23049645390070922, - "acc_stderr": 0.025123739226872416, - "acc_norm": 0.23049645390070922, - "acc_norm_stderr": 0.025123739226872416 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.043270409325787296, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.043270409325787296 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.03246887243637649, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.03246887243637649 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23575418994413408, - "acc_stderr": 0.014196375686290804, - "acc_norm": 0.23575418994413408, - "acc_norm_stderr": 0.014196375686290804 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.04793724854411021, - "acc_norm": 0.35, - "acc_norm_stderr": 0.04793724854411021 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.28308823529411764, - "acc_stderr": 0.02736586113151381, - "acc_norm": 0.28308823529411764, - "acc_norm_stderr": 0.02736586113151381 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.19591836734693877, - "acc_stderr": 0.025409301953225678, - "acc_norm": 0.19591836734693877, - "acc_norm_stderr": 0.025409301953225678 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22362869198312235, - "acc_stderr": 0.027123298205229972, - "acc_norm": 0.22362869198312235, - "acc_norm_stderr": 0.027123298205229972 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26401564537157757, - "acc_stderr": 0.011258435537723812, - "acc_norm": 0.26401564537157757, - "acc_norm_stderr": 0.011258435537723812 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.03132179803083291, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.03132179803083291 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03453131801885416, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03453131801885416 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25458996328029376, - "mc1_stderr": 0.015250117079156472, - "mc2": 0.4202272328082401, - "mc2_stderr": 0.016142378134497877 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3474178403755869, - "acc_stderr": 0.016322206819108932, - "acc_norm": 0.3744131455399061, - 
"acc_norm_stderr": 0.016590312676984496 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "etri-xainlp/polyglot-ko-12.8b-instruct", - "model_sha": "ec0113994052a77ef4741cf14d7a9af887b2e1d5", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json b/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json deleted file mode 100644 index 68d1ddb6e93179ddb36c03bed875d1518ce49d95..0000000000000000000000000000000000000000 --- a/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27474402730375425, - "acc_stderr": 0.013044617212771227, - "acc_norm": 
0.32081911262798635, - "acc_norm_stderr": 0.013640943091946526 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35660227046405096, - "acc_stderr": 0.0047801698733328435, - "acc_norm": 0.45717984465245964, - "acc_norm_stderr": 0.004971449552787173 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.035650796707083106, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.035650796707083106 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690878, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690878 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.30268199233716475, - "acc_stderr": 0.016428781581749367, - "acc_norm": 0.30268199233716475, - "acc_norm_stderr": 0.016428781581749367 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977111, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977111 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939101, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939101 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342343, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342343 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663925, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663925 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3183279742765273, - "acc_stderr": 0.026457225067811018, - "acc_norm": 0.3183279742765273, - "acc_norm_stderr": 0.026457225067811018 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.28699551569506726, - "acc_stderr": 0.030360379710291954, - "acc_norm": 0.28699551569506726, - "acc_norm_stderr": 0.030360379710291954 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30808080808080807, - "acc_stderr": 0.03289477330098614, - "acc_norm": 0.30808080808080807, - "acc_norm_stderr": 0.03289477330098614 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2620689655172414, - "acc_stderr": 0.03664666337225256, - "acc_norm": 0.2620689655172414, - "acc_norm_stderr": 0.03664666337225256 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24369747899159663, - "acc_stderr": 0.027886828078380558, - "acc_norm": 0.24369747899159663, - "acc_norm_stderr": 0.027886828078380558 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.021278393863586282, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.021278393863586282 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243838, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243838 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.22167487684729065, - "acc_stderr": 0.029225575892489607, - "acc_norm": 0.22167487684729065, - "acc_norm_stderr": 0.029225575892489607 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.267741935483871, - "acc_stderr": 0.02518900666021238, - "acc_norm": 0.267741935483871, - "acc_norm_stderr": 0.02518900666021238 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.29914529914529914, - "acc_stderr": 0.029996951858349476, - "acc_norm": 0.29914529914529914, - "acc_norm_stderr": 0.029996951858349476 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.028254200344438665, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.028254200344438665 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.044942908662520896, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.044942908662520896 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.03115715086935555, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.03115715086935555 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776575, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.02454761779480383, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.02454761779480383 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.024569223600460845, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.024569223600460845 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.25906735751295334, - "acc_stderr": 0.031618779179354094, - "acc_norm": 0.25906735751295334, - "acc_norm_stderr": 0.031618779179354094 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537315, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537315 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29541284403669726, - "acc_stderr": 0.019560619182976, - "acc_norm": 0.29541284403669726, - "acc_norm_stderr": 0.019560619182976 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.034550710191021475, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.034550710191021475 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.026716118380156837, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.026716118380156837 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.19834710743801653, - "acc_stderr": 0.03640118271990945, - "acc_norm": 0.19834710743801653, - "acc_norm_stderr": 0.03640118271990945 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.034597776068105365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.034597776068105365 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.018185218954318075, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.018185218954318075 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.042466243366976256, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.042466243366976256 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24537037037037038, - "acc_stderr": 0.029346665094372944, - "acc_norm": 0.24537037037037038, - "acc_norm_stderr": 0.029346665094372944 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2547486033519553, - "acc_stderr": 0.014572650383409163, - "acc_norm": 0.2547486033519553, - "acc_norm_stderr": 0.014572650383409163 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3860294117647059, - "acc_stderr": 0.029573269134411127, - "acc_norm": 0.3860294117647059, - "acc_norm_stderr": 0.029573269134411127 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2693877551020408, - "acc_stderr": 0.02840125202902294, - "acc_norm": 0.2693877551020408, - "acc_norm_stderr": 0.02840125202902294 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3459915611814346, - "acc_stderr": 0.03096481058878671, - "acc_norm": 0.3459915611814346, - "acc_norm_stderr": 0.03096481058878671 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.288135593220339, - "acc_stderr": 0.011567140661324561, - "acc_norm": 0.288135593220339, - "acc_norm_stderr": 0.011567140661324561 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.031822318676475544, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 
0.031822318676475544 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.21542227662178703, - "mc1_stderr": 0.01439190265242768, - "mc2": 0.37745653236553117, - "mc2_stderr": 0.015551417113340219 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18309859154929578, - "acc_stderr": 0.01325752772979992, - "acc_norm": 0.23591549295774647, - "acc_norm_stderr": 0.014554059570736366 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "fiveflow/kolong-llama-v0.1", - "model_sha": "e9ed499df932c04d7d3106603136f469c2f57aaa", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - 
"max_samples": null - } -} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json b/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json deleted file mode 100644 index 2a459e59784b40e57bfdd55cdf4eb1ec8ef4d76d..0000000000000000000000000000000000000000 --- a/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3370307167235495, - "acc_stderr": 0.01381347665290228, - "acc_norm": 0.38054607508532423, - "acc_norm_stderr": 0.014188277712349822 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3623780123481378, - "acc_stderr": 0.004797048154893968, - "acc_norm": 0.46883091017725553, - "acc_norm_stderr": 0.00498007670739243 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4327485380116959, - "acc_stderr": 0.03799978644370607, - "acc_norm": 0.4327485380116959, - "acc_norm_stderr": 0.03799978644370607 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4878671775223499, - "acc_stderr": 0.017874698667491355, - "acc_norm": 0.4878671775223499, - "acc_norm_stderr": 0.017874698667491355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3872340425531915, - "acc_stderr": 0.03184389265339525, - "acc_norm": 0.3872340425531915, - "acc_norm_stderr": 0.03184389265339525 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.0374005938202932, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.0374005938202932 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4887459807073955, - "acc_stderr": 0.028390897396863533, - "acc_norm": 0.4887459807073955, - "acc_norm_stderr": 0.028390897396863533 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4260089686098655, - "acc_stderr": 0.0331883328621728, - "acc_norm": 0.4260089686098655, - "acc_norm_stderr": 0.0331883328621728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03547601494006937, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03547601494006937 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.040937939812662374, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.040937939812662374 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617749, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617749 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4495798319327731, - "acc_stderr": 0.03231293497137707, - "acc_norm": 0.4495798319327731, - "acc_norm_stderr": 0.03231293497137707 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4256410256410256, - "acc_stderr": 0.025069094387296546, - "acc_norm": 0.4256410256410256, - "acc_norm_stderr": 0.025069094387296546 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35960591133004927, - "acc_stderr": 0.03376458246509567, - "acc_norm": 0.35960591133004927, - "acc_norm_stderr": 0.03376458246509567 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47419354838709676, - "acc_stderr": 0.02840609505765332, - "acc_norm": 0.47419354838709676, - "acc_norm_stderr": 0.02840609505765332 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6153846153846154, - "acc_stderr": 0.03187195347942466, - "acc_norm": 0.6153846153846154, - "acc_norm_stderr": 0.03187195347942466 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4188679245283019, - "acc_stderr": 0.030365050829115215, - "acc_norm": 0.4188679245283019, - "acc_norm_stderr": 0.030365050829115215 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794917, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794917 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.028406533090608466, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.028406533090608466 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5522388059701493, - "acc_stderr": 0.03516184772952166, - "acc_norm": 0.5522388059701493, - "acc_norm_stderr": 0.03516184772952166 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3468208092485549, - "acc_stderr": 0.036291466701596636, - "acc_norm": 0.3468208092485549, - "acc_norm_stderr": 0.036291466701596636 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463084, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463084 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.04032999053960719, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.04032999053960719 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.02691729617914911, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.02691729617914911 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.03856672163548913, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.03856672163548913 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 
0.42592592592592593, - "acc_stderr": 0.027513747284379417, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.027513747284379417 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.533678756476684, - "acc_stderr": 0.03600244069867178, - "acc_norm": 0.533678756476684, - "acc_norm_stderr": 0.03600244069867178 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46788990825688076, - "acc_stderr": 0.021393071222680804, - "acc_norm": 0.46788990825688076, - "acc_norm_stderr": 0.021393071222680804 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.40522875816993464, - "acc_stderr": 0.028110928492809075, - "acc_norm": 0.40522875816993464, - "acc_norm_stderr": 0.028110928492809075 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5537190082644629, - "acc_stderr": 0.04537935177947879, - "acc_norm": 0.5537190082644629, - "acc_norm_stderr": 0.04537935177947879 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.04033565667848319, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.04033565667848319 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.36437908496732024, - "acc_stderr": 0.019469518221573702, - "acc_norm": 0.36437908496732024, - "acc_norm_stderr": 0.019469518221573702 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.02764012054516993, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.02764012054516993 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.03236585252602157, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.03236585252602157 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26927374301675977, - "acc_stderr": 0.014835616582882601, - "acc_norm": 0.26927374301675977, - "acc_norm_stderr": 0.014835616582882601 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.029896163033125468, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.029896163033125468 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5061224489795918, - "acc_stderr": 0.03200682020163907, - "acc_norm": 0.5061224489795918, - "acc_norm_stderr": 0.03200682020163907 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 
0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.34615384615384615, - "acc_stderr": 0.012150699768228568, - "acc_norm": 0.34615384615384615, - "acc_norm_stderr": 0.012150699768228568 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.0156596057553269, - "mc2": 0.44247428746712286, - "mc2_stderr": 0.015350644205547385 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.20187793427230047, - "acc_stderr": 0.013759869182275586, - "acc_norm": 0.22769953051643194, - "acc_norm_stderr": 0.014375052416765491 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "garage-bAInd/Platypus2-13B", - "model_sha": "0a474bc0e76203528db789f027f4d6cce2727cce", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json b/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json deleted file mode 100644 index 0f0c907b83f340f3159b5c1b7467a09ca4ae5ef7..0000000000000000000000000000000000000000 --- a/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2858361774744027, - "acc_stderr": 0.013203196088537365, - "acc_norm": 0.3242320819112628, - "acc_norm_stderr": 0.013678810399518822 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3348934475204143, - "acc_stderr": 0.004709886644157085, - "acc_norm": 0.4153555068711412, - "acc_norm_stderr": 0.0049177611817401625 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47368421052631576, - "acc_stderr": 0.038295098689947286, - "acc_norm": 0.47368421052631576, - "acc_norm_stderr": 0.038295098689947286 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3883495145631068, - "acc_stderr": 0.0482572933735639, - "acc_norm": 0.3883495145631068, - "acc_norm_stderr": 0.0482572933735639 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41762452107279696, - "acc_stderr": 0.01763563732695152, - "acc_norm": 0.41762452107279696, - "acc_norm_stderr": 0.01763563732695152 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742399, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2851063829787234, - "acc_stderr": 0.02951319662553935, - "acc_norm": 0.2851063829787234, - "acc_norm_stderr": 0.02951319662553935 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.29518072289156627, - "acc_stderr": 0.03550920185689629, - "acc_norm": 0.29518072289156627, - "acc_norm_stderr": 0.03550920185689629 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3858520900321543, - "acc_stderr": 0.027648149599751475, - "acc_norm": 0.3858520900321543, - "acc_norm_stderr": 0.027648149599751475 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.032443052830087304, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.032443052830087304 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.04225875451969638, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.04225875451969638 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.033832012232444426, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.033832012232444426 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 
0.037245636197746325, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.037245636197746325 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2948717948717949, - "acc_stderr": 0.02311936275823229, - "acc_norm": 0.2948717948717949, - "acc_norm_stderr": 0.02311936275823229 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5277777777777778, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.5277777777777778, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35960591133004927, - "acc_stderr": 0.03376458246509568, - "acc_norm": 0.35960591133004927, - "acc_norm_stderr": 0.03376458246509568 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36451612903225805, - "acc_stderr": 0.027379871229943238, - "acc_norm": 0.36451612903225805, - "acc_norm_stderr": 0.027379871229943238 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5512820512820513, - "acc_stderr": 0.032583346493868806, - "acc_norm": 0.5512820512820513, - "acc_norm_stderr": 0.032583346493868806 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3132075471698113, - "acc_stderr": 0.02854479331905533, - "acc_norm": 0.3132075471698113, - "acc_norm_stderr": 0.02854479331905533 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425464, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425464 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.02696242432507383, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.02696242432507383 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804724, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.03511807571804724 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4626865671641791, - "acc_stderr": 0.035256751674679745, - "acc_norm": 0.4626865671641791, - "acc_norm_stderr": 0.035256751674679745 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.03533133389323657, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.03533133389323657 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.022644212615525218, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525218 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - 
"acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.026424816594009845, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.026424816594009845 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.37423312883435583, - "acc_stderr": 0.03802068102899616, - "acc_norm": 0.37423312883435583, - "acc_norm_stderr": 0.03802068102899616 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.38580246913580246, - "acc_stderr": 0.027085401226132143, - "acc_norm": 0.38580246913580246, - "acc_norm_stderr": 0.027085401226132143 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144808, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144808 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537317, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537317 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3559633027522936, - "acc_stderr": 0.020528559278244218, - "acc_norm": 0.3559633027522936, - "acc_norm_stderr": 0.020528559278244218 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.042163702135578345, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.042163702135578345 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.026925654653615686, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.026925654653615686 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488584 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.038781398887976104, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.038781398887976104 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35947712418300654, - "acc_stderr": 0.01941253924203216, - "acc_norm": 0.35947712418300654, - "acc_norm_stderr": 0.01941253924203216 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.0278079901413202, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.0278079901413202 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.0449394906861354, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.0449394906861354 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.030546745264953195, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.030546745264953195 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.01437816988409842, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.01437816988409842 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 
0.048783173121456316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27205882352941174, - "acc_stderr": 0.027033041151681456, - "acc_norm": 0.27205882352941174, - "acc_norm_stderr": 0.027033041151681456 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.02866685779027465, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.02866685779027465 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.46835443037974683, - "acc_stderr": 0.03248197400511075, - "acc_norm": 0.46835443037974683, - "acc_norm_stderr": 0.03248197400511075 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31486310299869624, - "acc_stderr": 0.011862561755715923, - "acc_norm": 0.31486310299869624, - "acc_norm_stderr": 0.011862561755715923 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.03476099060501637, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.03476099060501637 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.42424242424242425, - "acc_stderr": 0.038592681420702615, - "acc_norm": 0.42424242424242425, - "acc_norm_stderr": 0.038592681420702615 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.01565960575532691, - "mc2": 0.4571739435072619, - "mc2_stderr": 0.015454282704862585 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4295774647887324, - "acc_stderr": 0.01696892392010678, - "acc_norm": 0.4835680751173709, - "acc_norm_stderr": 0.01713052099393602 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "garage-bAInd/Platypus2-7B", - "model_sha": "c27aff7201e611f301c0e19f351cbe74b1a9f1f1", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/gl2een/polyglot-ko-12.8b-instrcut-full-finetune/result_2023-10-23 06:43:21.json b/gl2een/polyglot-ko-12.8b-instrcut-full-finetune/result_2023-10-23 06:43:21.json deleted file mode 100644 index 9a156404cadabde194b68b1c9cb08cc6b7e95fb7..0000000000000000000000000000000000000000 --- a/gl2een/polyglot-ko-12.8b-instrcut-full-finetune/result_2023-10-23 06:43:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2960750853242321, - "acc_stderr": 0.01334091608524627, - "acc_norm": 0.34982935153583616, - "acc_norm_stderr": 0.013936809212158287 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3895638319059948, - "acc_stderr": 0.004866547422355568, - "acc_norm": 0.5007966540529775, - "acc_norm_stderr": 0.004989775077835652 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03188578017686398, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03188578017686398 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.22988505747126436, - "acc_stderr": 0.015046301846691815, - "acc_norm": 0.22988505747126436, - "acc_norm_stderr": 0.015046301846691815 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.035478541985608264, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.035478541985608264 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838746, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838746 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.03070982405056527, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.03070982405056527 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26366559485530544, - "acc_stderr": 0.02502553850053234, - "acc_norm": 0.26366559485530544, - "acc_norm_stderr": 0.02502553850053234 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.10762331838565023, - "acc_stderr": 0.02079940008288001, - "acc_norm": 0.10762331838565023, - "acc_norm_stderr": 0.02079940008288001 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 
0.03880848301082396, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.03880848301082396 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.37373737373737376, - "acc_stderr": 0.03446897738659333, - "acc_norm": 0.37373737373737376, - "acc_norm_stderr": 0.03446897738659333 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.04755129616062947, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.04755129616062947 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3384615384615385, - "acc_stderr": 0.023991500500313036, - "acc_norm": 0.3384615384615385, - "acc_norm_stderr": 0.023991500500313036 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.15, - "acc_stderr": 0.035887028128263686, - "acc_norm": 0.15, - "acc_norm_stderr": 0.035887028128263686 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.1921182266009852, - "acc_stderr": 0.027719315709614775, - "acc_norm": 0.1921182266009852, - "acc_norm_stderr": 0.027719315709614775 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.26129032258064516, - "acc_stderr": 0.024993053397764812, - "acc_norm": 0.26129032258064516, - "acc_norm_stderr": 0.024993053397764812 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004236, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004236 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.02815283794249386, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.02815283794249386 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.04172343038705383, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.04172343038705383 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.027309140588230175, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.027309140588230175 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.038227469376587525, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.038227469376587525 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.030769444967296014, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.030769444967296014 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889925, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889925 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - 
"acc_stderr": 0.022644212615525218, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525218 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.24305555555555555, - "acc_stderr": 0.035868792800803406, - "acc_norm": 0.24305555555555555, - "acc_norm_stderr": 0.035868792800803406 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2630057803468208, - "acc_stderr": 0.02370309952525818, - "acc_norm": 0.2630057803468208, - "acc_norm_stderr": 0.02370309952525818 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2147239263803681, - "acc_stderr": 0.03226219377286775, - "acc_norm": 0.2147239263803681, - "acc_norm_stderr": 0.03226219377286775 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29015544041450775, - "acc_stderr": 0.03275264467791515, - "acc_norm": 0.29015544041450775, - "acc_norm_stderr": 0.03275264467791515 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3504587155963303, - "acc_stderr": 0.02045607759982446, - "acc_norm": 0.3504587155963303, - "acc_norm_stderr": 0.02045607759982446 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124252, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124252 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.025553169991826528, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.025553169991826528 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.12396694214876033, - "acc_stderr": 0.03008309871603522, - "acc_norm": 0.12396694214876033, - "acc_norm_stderr": 0.03008309871603522 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.033176727875331574, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.033176727875331574 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25, - "acc_stderr": 0.01751781884501444, - "acc_norm": 0.25, - "acc_norm_stderr": 0.01751781884501444 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22340425531914893, - "acc_stderr": 0.02484792135806396, - "acc_norm": 0.22340425531914893, - "acc_norm_stderr": 0.02484792135806396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755805, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755805 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.33796296296296297, - "acc_stderr": 0.03225941352631294, - "acc_norm": 0.33796296296296297, - 
"acc_norm_stderr": 0.03225941352631294 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.030161911930767102, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.030161911930767102 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827423, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827423 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2109704641350211, - "acc_stderr": 0.02655837250266192, - "acc_norm": 0.2109704641350211, - "acc_norm_stderr": 0.02655837250266192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.22946544980443284, - "acc_stderr": 0.010739489382279506, - "acc_norm": 0.22946544980443284, - "acc_norm_stderr": 0.010739489382279506 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.03287666758603488, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.03287666758603488 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.014948812679062137, - "mc2": 0.3868411014459314, - "mc2_stderr": 0.014624336944729052 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5950704225352113, - "acc_stderr": 0.016827095223977993, - "acc_norm": 0.6525821596244131, - "acc_norm_stderr": 0.01632220681910895 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - 
"harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "gl2een/polyglot-ko-12.8b-instrcut-full-finetune", - "model_sha": "86980ac70816c3cdc4314085af34e8e3b104b324", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json b/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json deleted file mode 100644 index 44771d0d4bac3595dc9313ac4d38a10780afd66f..0000000000000000000000000000000000000000 --- a/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2175767918088737, - "acc_stderr": 0.012057262020972506, - "acc_norm": 0.2627986348122867, - "acc_norm_stderr": 0.012862523175351331 - }, - "harness|ko_hellaswag|10": { - "acc": 0.31447918741286596, - "acc_stderr": 0.004633592029065801, - "acc_norm": 0.37890858394742083, - "acc_norm_stderr": 0.004841238763529378 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.045416094465039476, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.045416094465039476 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.22349936143039592, - "acc_stderr": 0.01489723522945071, - "acc_norm": 0.22349936143039592, - "acc_norm_stderr": 0.01489723522945071 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.02895734278834235, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.02895734278834235 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511116, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 
0.03529486801511116 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24758842443729903, - "acc_stderr": 0.024513879973621967, - "acc_norm": 0.24758842443729903, - "acc_norm_stderr": 0.024513879973621967 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19282511210762332, - "acc_stderr": 0.026478240960489365, - "acc_norm": 0.19282511210762332, - "acc_norm_stderr": 0.026478240960489365 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.03427308652999934, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.03427308652999934 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31932773109243695, - "acc_stderr": 0.0302839955258844, - "acc_norm": 0.31932773109243695, - "acc_norm_stderr": 0.0302839955258844 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3641025641025641, - "acc_stderr": 0.024396672985094778, - "acc_norm": 0.3641025641025641, - "acc_norm_stderr": 0.024396672985094778 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243838, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243838 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2903225806451613, - "acc_stderr": 0.02582210611941589, - "acc_norm": 0.2903225806451613, - "acc_norm_stderr": 0.02582210611941589 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.02528839450289137, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.02528839450289137 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085622, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085622 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3509933774834437, - "acc_stderr": 0.03896981964257374, - "acc_norm": 0.3509933774834437, - "acc_norm_stderr": 
0.03896981964257374 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.19402985074626866, - "acc_stderr": 0.02796267760476893, - "acc_norm": 0.19402985074626866, - "acc_norm_stderr": 0.02796267760476893 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.1791907514450867, - "acc_stderr": 0.02924251305906327, - "acc_norm": 0.1791907514450867, - "acc_norm_stderr": 0.02924251305906327 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952365, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952365 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.02289408248992599, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.02289408248992599 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.22085889570552147, - "acc_stderr": 0.03259177392742178, - "acc_norm": 0.22085889570552147, - "acc_norm_stderr": 0.03259177392742178 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.023468429832451163, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.023468429832451163 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27461139896373055, - "acc_stderr": 0.032210245080411544, - "acc_norm": 0.27461139896373055, - "acc_norm_stderr": 0.032210245080411544 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3522935779816514, - "acc_stderr": 0.020480568843999, - "acc_norm": 0.3522935779816514, - "acc_norm_stderr": 0.020480568843999 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.04325506042017086, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.04325506042017086 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.024848018263875195, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.024848018263875195 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.23140495867768596, - "acc_stderr": 0.03849856098794087, - "acc_norm": 0.23140495867768596, - "acc_norm_stderr": 0.03849856098794087 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.037150621549989035, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.037150621549989035 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.238562091503268, - "acc_stderr": 0.017242385828779606, - "acc_norm": 0.238562091503268, - "acc_norm_stderr": 0.017242385828779606 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23049645390070922, - "acc_stderr": 0.025123739226872405, - "acc_norm": 0.23049645390070922, - "acc_norm_stderr": 0.025123739226872405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.034859460964757394, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.034859460964757394 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.03018753206032938, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.03018753206032938 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22040816326530613, - "acc_stderr": 0.026537045312145315, - "acc_norm": 0.22040816326530613, - "acc_norm_stderr": 0.026537045312145315 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22784810126582278, - "acc_stderr": 0.02730348459906942, - "acc_norm": 0.22784810126582278, - "acc_norm_stderr": 0.02730348459906942 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2457627118644068, - "acc_stderr": 0.01099615663514269, - "acc_norm": 0.2457627118644068, - "acc_norm_stderr": 0.01099615663514269 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.0291022543896741, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.0291022543896741 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.0347769116216366, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.0347769116216366 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2484700122399021, - "mc1_stderr": 0.0151274270965207, - "mc2": 0.3837063373774927, - "mc2_stderr": 0.01511245687075564 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.12089201877934272, - "acc_stderr": 0.011175195935673987, - "acc_norm": 0.18896713615023475, - "acc_norm_stderr": 0.013419847693240476 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/42dot_LLM-PLM-1.3B-mt", - "model_sha": "2521354392f2de9e23703e72db4b0c04a13e29aa", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json b/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json deleted file mode 100644 index 7fe4fa2e2240b6bc80f7112ec88ba7612567d2cf..0000000000000000000000000000000000000000 --- a/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2619453924914676, - "acc_stderr": 0.012849054826858114, - "acc_norm": 0.3302047781569966, - "acc_norm_stderr": 0.013743085603760427 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3604859589723163, - "acc_stderr": 0.004791601975612766, - "acc_norm": 0.45429197371041624, - "acc_norm_stderr": 0.004968888130290065 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.1871345029239766, - "acc_stderr": 0.029913127232368025, - "acc_norm": 0.1871345029239766, - "acc_norm_stderr": 0.029913127232368025 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.21966794380587484, - "acc_stderr": 0.014805384478371163, - "acc_norm": 0.21966794380587484, - "acc_norm_stderr": 0.014805384478371163 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 
0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2127659574468085, - "acc_stderr": 0.026754391348039783, - "acc_norm": 0.2127659574468085, - "acc_norm_stderr": 0.026754391348039783 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.28938906752411575, - "acc_stderr": 0.02575586592263294, - "acc_norm": 0.28938906752411575, - "acc_norm_stderr": 0.02575586592263294 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.11659192825112108, - "acc_stderr": 0.02153963981624447, - "acc_norm": 0.11659192825112108, - "acc_norm_stderr": 0.02153963981624447 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.03880848301082396, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.03880848301082396 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29797979797979796, - "acc_stderr": 0.032586303838365555, - "acc_norm": 0.29797979797979796, - "acc_norm_stderr": 0.032586303838365555 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171452, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171452 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.32564102564102565, - "acc_stderr": 0.02375966576741229, - "acc_norm": 0.32564102564102565, - "acc_norm_stderr": 0.02375966576741229 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.30049261083743845, - "acc_stderr": 0.03225799476233485, - "acc_norm": 0.30049261083743845, - "acc_norm_stderr": 0.03225799476233485 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3, - "acc_stderr": 0.026069362295335134, - "acc_norm": 0.3, - "acc_norm_stderr": 0.026069362295335134 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.02749566368372406, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.02749566368372406 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 
0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.02549753263960954, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.02549753263960954 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3509933774834437, - "acc_stderr": 0.03896981964257375, - "acc_norm": 0.3509933774834437, - "acc_norm_stderr": 0.03896981964257375 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916707, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916707 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.034355680560478746, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.034355680560478746 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184756, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184756 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.03773809990686935, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.03773809990686935 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.023786203255508297, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.023786203255508297 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.03351953879521272, - "acc_norm": 0.2392638036809816, - "acc_norm_stderr": 0.03351953879521272 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.19753086419753085, - "acc_stderr": 0.02215288992789894, - "acc_norm": 0.19753086419753085, - "acc_norm_stderr": 0.02215288992789894 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32124352331606215, - "acc_stderr": 0.033699508685490674, - "acc_norm": 0.32124352331606215, - "acc_norm_stderr": 0.033699508685490674 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3669724770642202, - "acc_stderr": 0.02066467565952053, - "acc_norm": 0.3669724770642202, - "acc_norm_stderr": 0.02066467565952053 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.04073524322147126, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.04073524322147126 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2973856209150327, - "acc_stderr": 0.026173908506718576, - "acc_norm": 0.2973856209150327, - "acc_norm_stderr": 0.026173908506718576 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036844, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036844 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.1322314049586777, - "acc_stderr": 
0.030922788320445812, - "acc_norm": 0.1322314049586777, - "acc_norm_stderr": 0.030922788320445812 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316092, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316092 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.21631205673758866, - "acc_stderr": 0.024561720560562793, - "acc_norm": 0.21631205673758866, - "acc_norm_stderr": 0.024561720560562793 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.030187532060329383, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.030187532060329383 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22362869198312235, - "acc_stderr": 0.027123298205229972, - "acc_norm": 0.22362869198312235, - "acc_norm_stderr": 0.027123298205229972 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24967405475880053, - "acc_stderr": 0.011054538377832327, - "acc_norm": 0.24967405475880053, - "acc_norm_stderr": 0.011054538377832327 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2787878787878788, - "acc_stderr": 0.03501438706296781, - "acc_norm": 0.2787878787878788, - "acc_norm_stderr": 0.03501438706296781 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26193390452876375, - "mc1_stderr": 0.015392118805015016, - "mc2": 0.40797537743571977, - "mc2_stderr": 0.014976707161150397 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2335680751173709, - "acc_stderr": 0.014503698195409349, - "acc_norm": 0.2981220657276995, - "acc_norm_stderr": 0.015680614408195483 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - 
"harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/AULM-5.8b-v0804-hf", - "model_sha": "ddcfd46cc8b42d7fb6ad822d97b6c30dfd3c028b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json b/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json deleted file mode 100644 index 45d920f1045f01d3fa2f00b7bc83c2fe27d222cb..0000000000000000000000000000000000000000 --- a/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21331058020477817, - "acc_stderr": 0.011970971742326334, - "acc_norm": 0.26023890784982934, - "acc_norm_stderr": 0.012821930225112552 - }, - "harness|ko_hellaswag|10": { - "acc": 0.30302728540131446, - "acc_stderr": 0.004586276903267076, - "acc_norm": 0.3558056164110735, - "acc_norm_stderr": 0.00477778258481779 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.36257309941520466, - "acc_stderr": 0.036871306155620606, - "acc_norm": 0.36257309941520466, - "acc_norm_stderr": 0.036871306155620606 - }, - "harness|ko_mmlu_management|5": { - "acc": 
0.30097087378640774, - "acc_stderr": 0.045416094465039476, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.045416094465039476 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.30268199233716475, - "acc_stderr": 0.01642878158174936, - "acc_norm": 0.30268199233716475, - "acc_norm_stderr": 0.01642878158174936 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03820169914517905, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03820169914517905 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.30638297872340425, - "acc_stderr": 0.03013590647851756, - "acc_norm": 0.30638297872340425, - "acc_norm_stderr": 0.03013590647851756 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3086816720257235, - "acc_stderr": 0.026236965881153266, - "acc_norm": 0.3086816720257235, - "acc_norm_stderr": 0.026236965881153266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.031146796482972465, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.031146796482972465 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.046482319871173156, - "acc_norm": 0.31, - "acc_norm_stderr": 0.046482319871173156 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29292929292929293, - "acc_stderr": 0.03242497958178815, - "acc_norm": 0.29292929292929293, - "acc_norm_stderr": 0.03242497958178815 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03960933549451209, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03960933549451209 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.040925639582376536, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.040925639582376536 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3277310924369748, - "acc_stderr": 0.03048991141767323, - "acc_norm": 0.3277310924369748, - "acc_norm_stderr": 0.03048991141767323 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3487179487179487, - "acc_stderr": 0.02416278028401772, - "acc_norm": 0.3487179487179487, - "acc_norm_stderr": 0.02416278028401772 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3096774193548387, - "acc_stderr": 0.026302774983517418, - 
"acc_norm": 0.3096774193548387, - "acc_norm_stderr": 0.026302774983517418 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.452991452991453, - "acc_stderr": 0.032610998730986204, - "acc_norm": 0.452991452991453, - "acc_norm_stderr": 0.032610998730986204 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.02761116340239972, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.02761116340239972 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.04494290866252088, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.04494290866252088 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.025040443877000693, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.025040443877000693 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360385, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360385 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.34328358208955223, - "acc_stderr": 0.03357379665433431, - "acc_norm": 0.34328358208955223, - "acc_norm_stderr": 0.03357379665433431 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267437, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267437 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.0251901813276084, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.0251901813276084 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.026105673861409814, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.026105673861409814 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3160621761658031, - "acc_stderr": 0.033553973696861736, - "acc_norm": 0.3160621761658031, - "acc_norm_stderr": 0.033553973696861736 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.037124548537213684, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.037124548537213684 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30825688073394497, - "acc_stderr": 0.01979836669836725, - "acc_norm": 0.30825688073394497, - "acc_norm_stderr": 0.01979836669836725 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 
0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4049586776859504, - "acc_stderr": 0.044811377559424694, - "acc_norm": 0.4049586776859504, - "acc_norm_stderr": 0.044811377559424694 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.035834961763610625, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.035834961763610625 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2875816993464052, - "acc_stderr": 0.018311653053648222, - "acc_norm": 0.2875816993464052, - "acc_norm_stderr": 0.018311653053648222 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.025518731049537773, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.025518731049537773 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.044328040552915206, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.044328040552915206 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.33796296296296297, - "acc_stderr": 0.03225941352631295, - "acc_norm": 0.33796296296296297, - "acc_norm_stderr": 0.03225941352631295 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26033519553072626, - "acc_stderr": 0.014676252009319463, - "acc_norm": 0.26033519553072626, - "acc_norm_stderr": 0.014676252009319463 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.02576725201085595, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.02576725201085595 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37142857142857144, - "acc_stderr": 0.030932858792789848, - "acc_norm": 0.37142857142857144, - "acc_norm_stderr": 0.030932858792789848 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.21940928270042195, - "acc_stderr": 0.026939106581553945, - "acc_norm": 0.21940928270042195, - "acc_norm_stderr": 0.026939106581553945 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26792698826597133, - "acc_stderr": 0.011311347690633886, - "acc_norm": 0.26792698826597133, - "acc_norm_stderr": 0.011311347690633886 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.030190282453501943, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.030190282453501943 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2787878787878788, - "acc_stderr": 0.03501438706296781, - "acc_norm": 0.2787878787878788, - "acc_norm_stderr": 0.03501438706296781 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2717258261933905, - "mc1_stderr": 0.015572840452875835, - "mc2": 0.43296733660801473, - "mc2_stderr": 0.015927191551239974 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2476525821596244, - "acc_stderr": 0.014796734034366533, - 
"acc_norm": 0.29694835680751175, - "acc_norm_stderr": 0.015662796197363153 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/LIMA-13b-hf", - "model_sha": "98faa74a9b41cbd9033904cd58420705936849eb", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json b/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json deleted file mode 100644 index 581d38c66dba5efe059772f1186b9ca56273b618..0000000000000000000000000000000000000000 --- a/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.26706484641638223, - "acc_stderr": 0.01292893319649633, - "acc_norm": 
0.3046075085324232, - "acc_norm_stderr": 0.013449522109932492 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3309101772555268, - "acc_stderr": 0.004695791340502858, - "acc_norm": 0.4010157339175463, - "acc_norm_stderr": 0.0048910255336330226 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.036996580176568775, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.036996580176568775 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3883495145631068, - "acc_stderr": 0.0482572933735639, - "acc_norm": 0.3883495145631068, - "acc_norm_stderr": 0.0482572933735639 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2937420178799489, - "acc_stderr": 0.016287759388491675, - "acc_norm": 0.2937420178799489, - "acc_norm_stderr": 0.016287759388491675 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2297872340425532, - "acc_stderr": 0.027501752944412424, - "acc_norm": 0.2297872340425532, - "acc_norm_stderr": 0.027501752944412424 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.03329394119073532, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.03329394119073532 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2829581993569132, - "acc_stderr": 0.02558306248998482, - "acc_norm": 0.2829581993569132, - "acc_norm_stderr": 0.02558306248998482 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3053435114503817, - "acc_stderr": 0.040393149787245605, - "acc_norm": 0.3053435114503817, - "acc_norm_stderr": 0.040393149787245605 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35858585858585856, - "acc_stderr": 0.03416903640391521, - "acc_norm": 0.35858585858585856, - "acc_norm_stderr": 0.03416903640391521 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.27310924369747897, - "acc_stderr": 0.028942004040998167, - "acc_norm": 0.27310924369747897, - "acc_norm_stderr": 0.028942004040998167 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3153846153846154, - "acc_stderr": 0.023559646983189957, - "acc_norm": 0.3153846153846154, - "acc_norm_stderr": 0.023559646983189957 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868408, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868408 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.32903225806451614, - "acc_stderr": 0.02672949906834996, - "acc_norm": 0.32903225806451614, - "acc_norm_stderr": 0.02672949906834996 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.38461538461538464, - "acc_stderr": 0.03187195347942466, - "acc_norm": 0.38461538461538464, - "acc_norm_stderr": 0.03187195347942466 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493857, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493857 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945273, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945273 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943342, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943342 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.36318407960199006, - "acc_stderr": 0.034005985055990146, - "acc_norm": 0.36318407960199006, - "acc_norm_stderr": 0.034005985055990146 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889904, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889904 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.02320139293819498, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.02320139293819498 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2976878612716763, - "acc_stderr": 0.024617055388677003, - "acc_norm": 0.2976878612716763, - "acc_norm_stderr": 0.024617055388677003 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.036230899157241474, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.036230899157241474 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.33641975308641975, - "acc_stderr": 0.02628973494595293, - "acc_norm": 0.33641975308641975, - "acc_norm_stderr": 0.02628973494595293 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32124352331606215, - "acc_stderr": 0.033699508685490674, - "acc_norm": 0.32124352331606215, - "acc_norm_stderr": 0.033699508685490674 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 
0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30091743119266057, - "acc_stderr": 0.019664751366802114, - "acc_norm": 0.30091743119266057, - "acc_norm_stderr": 0.019664751366802114 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.027057974624494382, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.027057974624494382 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2809917355371901, - "acc_stderr": 0.041032038305145124, - "acc_norm": 0.2809917355371901, - "acc_norm_stderr": 0.041032038305145124 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316091, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316091 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.017952449196987866, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.017952449196987866 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.17857142857142858, - "acc_stderr": 0.036352091215778065, - "acc_norm": 0.17857142857142858, - "acc_norm_stderr": 0.036352091215778065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.01444415780826145, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.01444415780826145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932267, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932267 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16911764705882354, - "acc_stderr": 0.02277086801011303, - "acc_norm": 0.16911764705882354, - "acc_norm_stderr": 0.02277086801011303 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3306122448979592, - "acc_stderr": 0.03011642629654059, - "acc_norm": 0.3306122448979592, - "acc_norm_stderr": 0.03011642629654059 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2489451476793249, - "acc_stderr": 0.028146970599422644, - "acc_norm": 0.2489451476793249, - "acc_norm_stderr": 0.028146970599422644 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26988265971316816, - "acc_stderr": 0.011337381084250404, - "acc_norm": 0.26988265971316816, - "acc_norm_stderr": 0.011337381084250404 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.0313217980308329, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.0313217980308329 - }, - 
"harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03453131801885416, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03453131801885416 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.45638880812290744, - "mc2_stderr": 0.01588078280533526 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2981220657276995, - "acc_stderr": 0.015680614408195476, - "acc_norm": 0.32511737089201875, - "acc_norm_stderr": 0.016057185777207567 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/LIMA2-7b-hf", - "model_sha": "6a1aa59cb7624f059728840ce68b20b1070ebdcb", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at 
end of file diff --git a/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json b/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json deleted file mode 100644 index 6f0574d554af8d2801ea9437b26daced68da1428..0000000000000000000000000000000000000000 --- a/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19112627986348124, - "acc_stderr": 0.011490055292778599, - "acc_norm": 0.22781569965870307, - "acc_norm_stderr": 0.012256708602326907 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2822146982672774, - "acc_stderr": 0.004491574539441884, - "acc_norm": 0.30770762796255724, - "acc_norm_stderr": 0.004606015773125627 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24393358876117496, - "acc_stderr": 0.015357212665829479, - "acc_norm": 0.24393358876117496, - "acc_norm_stderr": 0.015357212665829479 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.03749850709174021, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.03749850709174021 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.30638297872340425, - "acc_stderr": 0.030135906478517563, - "acc_norm": 0.30638297872340425, - "acc_norm_stderr": 0.030135906478517563 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.03629335329947859, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.03629335329947859 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24758842443729903, - "acc_stderr": 0.024513879973621963, - "acc_norm": 0.24758842443729903, - "acc_norm_stderr": 0.024513879973621963 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.32286995515695066, - "acc_stderr": 0.031381476375754995, - "acc_norm": 0.32286995515695066, - "acc_norm_stderr": 0.031381476375754995 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.1919191919191919, - "acc_stderr": 0.028057791672989017, - "acc_norm": 0.1919191919191919, - "acc_norm_stderr": 0.028057791672989017 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03724563619774633, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03724563619774633 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307809, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307809 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.226890756302521, - "acc_stderr": 0.027205371538279472, - "acc_norm": 0.226890756302521, - "acc_norm_stderr": 0.027205371538279472 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.20256410256410257, - "acc_stderr": 0.020377660970371383, - "acc_norm": 0.20256410256410257, - "acc_norm_stderr": 0.020377660970371383 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03144712581678243, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03144712581678243 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.22903225806451613, - "acc_stderr": 0.023904914311782658, - "acc_norm": 0.22903225806451613, - "acc_norm_stderr": 0.023904914311782658 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.029480360549541194, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.029480360549541194 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22641509433962265, - "acc_stderr": 0.025757559893106737, - "acc_norm": 0.22641509433962265, - "acc_norm_stderr": 0.025757559893106737 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072775, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072775 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.03297986648473835, - "acc_norm": 0.2052980132450331, - "acc_norm_stderr": 0.03297986648473835 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22885572139303484, - "acc_stderr": 0.02970528405677243, - "acc_norm": 0.22885572139303484, - "acc_norm_stderr": 0.02970528405677243 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.03186209851641144, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.03186209851641144 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.020842290930114665, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.020842290930114665 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2514450867052023, - "acc_stderr": 0.02335736578587403, - "acc_norm": 0.2514450867052023, - "acc_norm_stderr": 0.02335736578587403 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.033519538795212696, - "acc_norm": 0.2392638036809816, - "acc_norm_stderr": 0.033519538795212696 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 
0.2654320987654321, - "acc_stderr": 0.02456922360046085, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.02456922360046085 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.16580310880829016, - "acc_stderr": 0.026839845022314415, - "acc_norm": 0.16580310880829016, - "acc_norm_stderr": 0.026839845022314415 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.0404933929774814, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.0404933929774814 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.2036697247706422, - "acc_stderr": 0.017266742087630783, - "acc_norm": 0.2036697247706422, - "acc_norm_stderr": 0.017266742087630783 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.040061680838488774, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.040061680838488774 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22875816993464052, - "acc_stderr": 0.02405102973991225, - "acc_norm": 0.22875816993464052, - "acc_norm_stderr": 0.02405102973991225 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.041733491480835, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.041733491480835 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2236842105263158, - "acc_stderr": 0.03391160934343604, - "acc_norm": 0.2236842105263158, - "acc_norm_stderr": 0.03391160934343604 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.01755581809132227, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.01755581809132227 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953775, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953775 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.04493949068613539, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.04493949068613539 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.1712962962962963, - "acc_stderr": 0.025695341643824688, - "acc_norm": 0.1712962962962963, - "acc_norm_stderr": 0.025695341643824688 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23128491620111732, - "acc_stderr": 0.01410222362315258, - "acc_norm": 0.23128491620111732, - "acc_norm_stderr": 0.01410222362315258 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.02439819298665492, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.02439819298665492 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22857142857142856, - "acc_stderr": 0.026882144922307744, - "acc_norm": 0.22857142857142856, - "acc_norm_stderr": 0.026882144922307744 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 
0.25738396624472576, - "acc_stderr": 0.02845882099146029, - "acc_norm": 0.25738396624472576, - "acc_norm_stderr": 0.02845882099146029 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2301173402868318, - "acc_stderr": 0.010750183177375559, - "acc_norm": 0.2301173402868318, - "acc_norm_stderr": 0.010750183177375559 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.20606060606060606, - "acc_stderr": 0.0315841532404771, - "acc_norm": 0.20606060606060606, - "acc_norm_stderr": 0.0315841532404771 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2876376988984088, - "mc1_stderr": 0.015846315101394805, - "mc2": 0.4653887573676535, - "mc2_stderr": 0.01614389294463642 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.215962441314554, - "acc_stderr": 0.014105639097711667, - "acc_norm": 0.30633802816901406, - "acc_norm_stderr": 0.015801911286714734 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/WizardVicuna-3B-0719", - "model_sha": "66621ebc9e2fa15e4fe229dfbea725c916cb7c5e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json b/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json deleted file mode 100644 index 4e254ffe834dc7bb6d7690a866ad9b26c1b5823d..0000000000000000000000000000000000000000 --- a/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20563139931740615, - "acc_stderr": 0.011810745260742581, - "acc_norm": 0.257679180887372, - "acc_norm_stderr": 0.012780770562768414 - }, - "harness|ko_hellaswag|10": { - "acc": 0.27693686516630156, - "acc_stderr": 0.004465704810893538, - "acc_norm": 0.30611431985660226, - "acc_norm_stderr": 0.004599358920909526 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.34502923976608185, - "acc_stderr": 0.03645981377388807, - "acc_norm": 0.34502923976608185, - "acc_norm_stderr": 0.03645981377388807 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.04185832598928315, - "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.04185832598928315 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24904214559386972, - "acc_stderr": 0.015464676163395983, - "acc_norm": 0.24904214559386972, - "acc_norm_stderr": 0.015464676163395983 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.039725528847851375, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.039725528847851375 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.02895734278834235, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.02895734278834235 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2289156626506024, - "acc_stderr": 0.03270745277352477, - "acc_norm": 0.2289156626506024, - "acc_norm_stderr": 0.03270745277352477 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.20257234726688103, - "acc_stderr": 0.022827317491059682, - "acc_norm": 0.20257234726688103, - "acc_norm_stderr": 0.022827317491059682 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508762, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508762 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596918, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.03727673575596918 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.21212121212121213, - "acc_stderr": 0.029126522834586825, - "acc_norm": 0.21212121212121213, - "acc_norm_stderr": 0.029126522834586825 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 
0.2689655172413793, - "acc_stderr": 0.03695183311650232, - "acc_norm": 0.2689655172413793, - "acc_norm_stderr": 0.03695183311650232 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171452, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171452 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.027553614467863773, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.027553614467863773 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2230769230769231, - "acc_stderr": 0.021107730127243998, - "acc_norm": 0.2230769230769231, - "acc_norm_stderr": 0.021107730127243998 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.22258064516129034, - "acc_stderr": 0.02366421667164251, - "acc_norm": 0.22258064516129034, - "acc_norm_stderr": 0.02366421667164251 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2905982905982906, - "acc_stderr": 0.029745048572674054, - "acc_norm": 0.2905982905982906, - "acc_norm_stderr": 0.029745048572674054 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.23018867924528302, - "acc_stderr": 0.025907897122408173, - "acc_norm": 0.23018867924528302, - "acc_norm_stderr": 0.025907897122408173 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763744, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763744 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.030965903123573037, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.030965903123573037 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.21965317919075145, - "acc_stderr": 0.031568093627031744, - "acc_norm": 0.21965317919075145, - "acc_norm_stderr": 0.031568093627031744 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.022261817692400175, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.022261817692400175 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 
0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.28034682080924855, - "acc_stderr": 0.024182427496577622, - "acc_norm": 0.28034682080924855, - "acc_norm_stderr": 0.024182427496577622 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25153374233128833, - "acc_stderr": 0.034089978868575295, - "acc_norm": 0.25153374233128833, - "acc_norm_stderr": 0.034089978868575295 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2808641975308642, - "acc_stderr": 0.025006469755799215, - "acc_norm": 0.2808641975308642, - "acc_norm_stderr": 0.025006469755799215 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.21243523316062177, - "acc_stderr": 0.02951928261681725, - "acc_norm": 0.21243523316062177, - "acc_norm_stderr": 0.02951928261681725 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.2036697247706422, - "acc_stderr": 0.01726674208763079, - "acc_norm": 0.2036697247706422, - "acc_norm_stderr": 0.01726674208763079 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.03455071019102148, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.03455071019102148 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.0248480182638752, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.0248480182638752 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624504, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624504 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19736842105263158, - "acc_stderr": 0.03238981601699397, - "acc_norm": 0.19736842105263158, - "acc_norm_stderr": 0.03238981601699397 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2826797385620915, - "acc_stderr": 0.018217269552053435, - "acc_norm": 0.2826797385620915, - "acc_norm_stderr": 0.018217269552053435 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2824074074074074, - "acc_stderr": 0.030701372111510934, - "acc_norm": 0.2824074074074074, - "acc_norm_stderr": 0.030701372111510934 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2335195530726257, - "acc_stderr": 0.014149575348976264, - "acc_norm": 0.2335195530726257, - "acc_norm_stderr": 0.014149575348976264 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - 
"acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22426470588235295, - "acc_stderr": 0.025336848563332372, - "acc_norm": 0.22426470588235295, - "acc_norm_stderr": 0.025336848563332372 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2489451476793249, - "acc_stderr": 0.028146970599422644, - "acc_norm": 0.2489451476793249, - "acc_norm_stderr": 0.028146970599422644 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2301173402868318, - "acc_stderr": 0.01075018317737556, - "acc_norm": 0.2301173402868318, - "acc_norm_stderr": 0.01075018317737556 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23039215686274508, - "acc_stderr": 0.029554292605695063, - "acc_norm": 0.23039215686274508, - "acc_norm_stderr": 0.029554292605695063 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2839657282741738, - "mc1_stderr": 0.015785370858396718, - "mc2": 0.46188658792557263, - "mc2_stderr": 0.016386200757722597 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.30985915492957744, - "acc_stderr": 0.015852079863534953, - "acc_norm": 0.44366197183098594, - "acc_norm_stderr": 0.01703062930161308 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - 
"harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/WizardVicuna-open-llama-3b-v2", - "model_sha": "0946550dfbf40d926d6ba816d0ca13e9c810fa72", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json b/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json deleted file mode 100644 index a6cd8b243d83a2440ce62f208b1fdf96b8b3f81f..0000000000000000000000000000000000000000 --- a/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2551194539249147, - "acc_stderr": 0.012739038695202105, - "acc_norm": 0.3191126279863481, - "acc_norm_stderr": 0.013621696119173306 - }, - "harness|ko_hellaswag|10": { - "acc": 0.32812188807010556, - "acc_stderr": 0.0046856987521048075, - "acc_norm": 0.39225253933479387, - "acc_norm_stderr": 0.004872546302641858 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917055, - "acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917055 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3300970873786408, - "acc_stderr": 0.0465614711001235, - "acc_norm": 0.3300970873786408, - "acc_norm_stderr": 0.0465614711001235 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.017268607560005773, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.017268607560005773 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.03749850709174021, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.03749850709174021 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610334, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610334 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 0.03711725190740749, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.03711725190740749 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3408360128617363, - "acc_stderr": 0.026920841260776165, - "acc_norm": 0.3408360128617363, - "acc_norm_stderr": 0.026920841260776165 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.38565022421524664, - "acc_stderr": 0.03266842214289201, - "acc_norm": 0.38565022421524664, - "acc_norm_stderr": 0.03266842214289201 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.0426073515764456, - 
"acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.0426073515764456 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35858585858585856, - "acc_stderr": 0.03416903640391521, - "acc_norm": 0.35858585858585856, - "acc_norm_stderr": 0.03416903640391521 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.04043461861916747, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.04043461861916747 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307809, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307809 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3067226890756303, - "acc_stderr": 0.029953823891887048, - "acc_norm": 0.3067226890756303, - "acc_norm_stderr": 0.029953823891887048 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2948717948717949, - "acc_stderr": 0.023119362758232294, - "acc_norm": 0.2948717948717949, - "acc_norm_stderr": 0.023119362758232294 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.04616631111801714, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.04616631111801714 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2512315270935961, - "acc_stderr": 0.030516530732694436, - "acc_norm": 0.2512315270935961, - "acc_norm_stderr": 0.030516530732694436 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3580645161290323, - "acc_stderr": 0.027273890594300642, - "acc_norm": 0.3580645161290323, - "acc_norm_stderr": 0.027273890594300642 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49145299145299143, - "acc_stderr": 0.032751303000970296, - "acc_norm": 0.49145299145299143, - "acc_norm_stderr": 0.032751303000970296 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3471698113207547, - "acc_stderr": 0.029300101705549655, - "acc_norm": 0.3471698113207547, - "acc_norm_stderr": 0.029300101705549655 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4090909090909091, - "acc_stderr": 0.047093069786618966, - "acc_norm": 0.4090909090909091, - "acc_norm_stderr": 0.047093069786618966 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371216, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371216 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.03374235550425694, - "acc_norm": 0.2185430463576159, - "acc_norm_stderr": 0.03374235550425694 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.40298507462686567, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.40298507462686567, - "acc_norm_stderr": 0.034683432951111266 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047873, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047873 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.02226181769240016, - 
"acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.02226181769240016 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956913, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956913 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3468208092485549, - "acc_stderr": 0.025624723994030457, - "acc_norm": 0.3468208092485549, - "acc_norm_stderr": 0.025624723994030457 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.02700252103451647, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.02700252103451647 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.33678756476683935, - "acc_stderr": 0.034107802518361846, - "acc_norm": 0.33678756476683935, - "acc_norm_stderr": 0.034107802518361846 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3155963302752294, - "acc_stderr": 0.019926117513869662, - "acc_norm": 0.3155963302752294, - "acc_norm_stderr": 0.019926117513869662 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.037184890068181146, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.037184890068181146 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.026336613469046637, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.026336613469046637 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5537190082644629, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.5537190082644629, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.036906779861372814, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.036906779861372814 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.018550634502952964, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.018550634502952964 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.02718712701150379, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.02718712701150379 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697624, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697624 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.02813968944485967, - "acc_norm": 0.2175925925925926, - 
"acc_norm_stderr": 0.02813968944485967 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.24632352941176472, - "acc_stderr": 0.02617343857052, - "acc_norm": 0.24632352941176472, - "acc_norm_stderr": 0.02617343857052 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3020408163265306, - "acc_stderr": 0.02939360931987982, - "acc_norm": 0.3020408163265306, - "acc_norm_stderr": 0.02939360931987982 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.03129920825530213, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.03129920825530213 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2646675358539765, - "acc_stderr": 0.011267332992845535, - "acc_norm": 0.2646675358539765, - "acc_norm_stderr": 0.011267332992845535 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.03149328104507956, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.03149328104507956 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.033175059300091805, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.033175059300091805 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006509, - "mc2": 0.44330415731488865, - "mc2_stderr": 0.015557823529945149 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.21009389671361503, - "acc_stderr": 0.013964637699696681, - "acc_norm": 0.25704225352112675, - "acc_norm_stderr": 0.014980266433015246 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/WizardVicuna2-13b-hf", - "model_sha": "6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json b/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json deleted file mode 100644 index e7480b4c5f994d080b8204065ce6af5d6af763b6..0000000000000000000000000000000000000000 --- a/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21075085324232082, - "acc_stderr": 0.011918271754852189, - "acc_norm": 0.2687713310580205, - "acc_norm_stderr": 0.01295506596371068 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3009360685122486, - "acc_stderr": 0.004577275844432453, - "acc_norm": 0.3458474407488548, - "acc_norm_stderr": 0.004746716805735747 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03188578017686398, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03188578017686398 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.27330779054916987, - "acc_stderr": 0.015936681062628556, - "acc_norm": 0.27330779054916987, - "acc_norm_stderr": 0.015936681062628556 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.03547854198560826, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.03547854198560826 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.03115852213135778, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.03115852213135778 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 0.03384429155233137, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 
0.03384429155233137 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26366559485530544, - "acc_stderr": 0.025025538500532338, - "acc_norm": 0.26366559485530544, - "acc_norm_stderr": 0.025025538500532338 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25757575757575757, - "acc_stderr": 0.031156269519646836, - "acc_norm": 0.25757575757575757, - "acc_norm_stderr": 0.031156269519646836 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.20689655172413793, - "acc_stderr": 0.03375672449560554, - "acc_norm": 0.20689655172413793, - "acc_norm_stderr": 0.03375672449560554 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.031041941304059288, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.031041941304059288 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2923076923076923, - "acc_stderr": 0.023060438380857726, - "acc_norm": 0.2923076923076923, - "acc_norm_stderr": 0.023060438380857726 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.041331194402438376, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.041331194402438376 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.02948036054954119, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.02948036054954119 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24528301886792453, - "acc_stderr": 0.0264803571798957, - "acc_norm": 0.24528301886792453, - "acc_norm_stderr": 0.0264803571798957 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.028226446749683515, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.028226446749683515 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658754, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 
0.03822746937658754 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22885572139303484, - "acc_stderr": 0.029705284056772436, - "acc_norm": 0.22885572139303484, - "acc_norm_stderr": 0.029705284056772436 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.030952890217749884, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.030952890217749884 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2275132275132275, - "acc_stderr": 0.021591269407823764, - "acc_norm": 0.2275132275132275, - "acc_norm_stderr": 0.021591269407823764 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.02289408248992599, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.02289408248992599 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25153374233128833, - "acc_stderr": 0.034089978868575295, - "acc_norm": 0.25153374233128833, - "acc_norm_stderr": 0.034089978868575295 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25617283950617287, - "acc_stderr": 0.0242885336377261, - "acc_norm": 0.25617283950617287, - "acc_norm_stderr": 0.0242885336377261 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.18652849740932642, - "acc_stderr": 0.02811209121011746, - "acc_norm": 0.18652849740932642, - "acc_norm_stderr": 0.02811209121011746 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21467889908256882, - "acc_stderr": 0.017604304149256483, - "acc_norm": 0.21467889908256882, - "acc_norm_stderr": 0.017604304149256483 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.037184890068181146, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.037184890068181146 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2973856209150327, - "acc_stderr": 0.02617390850671858, - "acc_norm": 0.2973856209150327, - "acc_norm_stderr": 0.02617390850671858 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2809917355371901, - "acc_stderr": 0.04103203830514511, - "acc_norm": 0.2809917355371901, - "acc_norm_stderr": 0.04103203830514511 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.16447368421052633, - "acc_stderr": 0.030167533468632726, - "acc_norm": 0.16447368421052633, - "acc_norm_stderr": 0.030167533468632726 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.017401816711427653, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.017401816711427653 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22695035460992907, - "acc_stderr": 0.024987106365642973, - "acc_norm": 0.22695035460992907, - "acc_norm_stderr": 0.024987106365642973 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.04246624336697625, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.04246624336697625 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4583333333333333, - "acc_stderr": 0.033981108902946366, - "acc_norm": 0.4583333333333333, - "acc_norm_stderr": 0.033981108902946366 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4522058823529412, - "acc_stderr": 0.030233758551596452, - "acc_norm": 0.4522058823529412, - "acc_norm_stderr": 0.030233758551596452 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.1836734693877551, - "acc_stderr": 0.024789071332007633, - "acc_norm": 0.1836734693877551, - "acc_norm_stderr": 0.024789071332007633 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.242503259452412, - "acc_stderr": 0.01094657096634878, - "acc_norm": 0.242503259452412, - "acc_norm_stderr": 0.01094657096634878 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23623011015911874, - "mc1_stderr": 0.014869755015871098, - "mc2": 0.428122521678851, - "mc2_stderr": 0.015366900048399064 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.42370892018779344, - "acc_stderr": 0.016939085971158445, - "acc_norm": 0.5446009389671361, - "acc_norm_stderr": 0.01707145266733428 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 
1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/koalpaca-355m", - "model_sha": "a1f4b5022e95bd808e2375dd3ed4c9bfbb64df32", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json b/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json deleted file mode 100644 index 44302b5d344efcf78d937f959286ec66615db210..0000000000000000000000000000000000000000 --- a/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19283276450511946, - "acc_stderr": 0.011529055465663324, - "acc_norm": 0.23122866894197952, - "acc_norm_stderr": 0.012320858834772274 - }, - "harness|ko_hellaswag|10": { - "acc": 0.25652260505875324, - "acc_stderr": 0.004358210689442262, - "acc_norm": 0.2560246962756423, - "acc_norm_stderr": 0.004355436696716298 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822582, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822582 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2835249042145594, - "acc_stderr": 0.016117318166832283, - "acc_norm": 0.2835249042145594, - "acc_norm_stderr": 0.016117318166832283 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977111, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977111 - }, - "harness|ko_mmlu_abstract_algebra|5": { - 
"acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.02635515841334941, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.02635515841334941 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.03460579907553027, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.03460579907553027 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.24663677130044842, - "acc_stderr": 0.028930413120910877, - "acc_norm": 0.24663677130044842, - "acc_norm_stderr": 0.028930413120910877 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.20610687022900764, - "acc_stderr": 0.03547771004159462, - "acc_norm": 0.20610687022900764, - "acc_norm_stderr": 0.03547771004159462 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23232323232323232, - "acc_stderr": 0.030088629490217483, - "acc_norm": 0.23232323232323232, - "acc_norm_stderr": 0.030088629490217483 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2184873949579832, - "acc_stderr": 0.02684151432295893, - "acc_norm": 0.2184873949579832, - "acc_norm_stderr": 0.02684151432295893 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24358974358974358, - "acc_stderr": 0.028120966503914404, - "acc_norm": 0.24358974358974358, - "acc_norm_stderr": 0.028120966503914404 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.025288394502891363, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.025288394502891363 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 
0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.030360490154014666, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.030360490154014666 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.034355680560478746, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.034355680560478746 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680814, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680814 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2745664739884393, - "acc_stderr": 0.02402774515526503, - "acc_norm": 0.2745664739884393, - "acc_norm_stderr": 0.02402774515526503 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868038, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868038 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23302752293577983, - "acc_stderr": 0.0181256691808615, - "acc_norm": 0.23302752293577983, - "acc_norm_stderr": 0.0181256691808615 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276865, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276865 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.27124183006535946, - "acc_stderr": 0.025457756696667878, - "acc_norm": 0.27124183006535946, - "acc_norm_stderr": 0.025457756696667878 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968432, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968432 - }, - 
"harness|ko_mmlu_astronomy|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.033176727875331574, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.033176727875331574 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23366013071895425, - "acc_stderr": 0.017119158496044506, - "acc_norm": 0.23366013071895425, - "acc_norm_stderr": 0.017119158496044506 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307854, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307854 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755807, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755807 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.027920963147993662, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.027920963147993662 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.19852941176470587, - "acc_stderr": 0.02423101337054111, - "acc_norm": 0.19852941176470587, - "acc_norm_stderr": 0.02423101337054111 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788167, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2742616033755274, - "acc_stderr": 0.029041333510598018, - "acc_norm": 0.2742616033755274, - "acc_norm_stderr": 0.029041333510598018 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27053455019556716, - "acc_stderr": 0.011345996743539265, - "acc_norm": 0.27053455019556716, - "acc_norm_stderr": 0.011345996743539265 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.028379449451588667, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.028379449451588667 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2787878787878788, - "acc_stderr": 0.03501438706296781, - "acc_norm": 0.2787878787878788, - "acc_norm_stderr": 0.03501438706296781 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2717258261933905, - "mc1_stderr": 0.015572840452875833, - "mc2": 0.5203988868301895, - "mc2_stderr": 0.016282877106771964 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.17488262910798122, - "acc_stderr": 0.013021662108610242, - "acc_norm": 0.4295774647887324, - "acc_norm_stderr": 0.01696892392010679 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - 
"harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/kodialogpt-v1", - "model_sha": "f8b2ddbf8feed75a3e4b8b9de8b17b37efb4d5e0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json b/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json deleted file mode 100644 index c0c0b4b9f0e6d26b137633969774cbd0b87cede6..0000000000000000000000000000000000000000 --- a/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30802047781569963, - "acc_stderr": 0.013491429517292038, - "acc_norm": 0.34726962457337884, - "acc_norm_stderr": 0.013913034529620439 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37004580760804623, - "acc_stderr": 0.004818298991012552, - "acc_norm": 0.47231627165903206, - "acc_norm_stderr": 0.004982127315605219 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.03401052620104088, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.03401052620104088 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2621359223300971, - "acc_stderr": 0.043546310772605956, - "acc_norm": 
0.2621359223300971, - "acc_norm_stderr": 0.043546310772605956 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2988505747126437, - "acc_stderr": 0.016369256815093127, - "acc_norm": 0.2988505747126437, - "acc_norm_stderr": 0.016369256815093127 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617724, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617724 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.225531914893617, - "acc_stderr": 0.02732107841738753, - "acc_norm": 0.225531914893617, - "acc_norm_stderr": 0.02732107841738753 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.27710843373493976, - "acc_stderr": 0.034843315926805875, - "acc_norm": 0.27710843373493976, - "acc_norm_stderr": 0.034843315926805875 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33762057877813506, - "acc_stderr": 0.02685882587948855, - "acc_norm": 0.33762057877813506, - "acc_norm_stderr": 0.02685882587948855 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2645739910313901, - "acc_stderr": 0.029605103217038332, - "acc_norm": 0.2645739910313901, - "acc_norm_stderr": 0.029605103217038332 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.25190839694656486, - "acc_stderr": 0.038073871163060866, - "acc_norm": 0.25190839694656486, - "acc_norm_stderr": 0.038073871163060866 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03191178226713547, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03191178226713547 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.03831226048850333, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.03831226048850333 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.042207736591714506, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.042207736591714506 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.02665353159671549, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.02665353159671549 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.02093244577446317, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.02093244577446317 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.046166311118017125, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.046166311118017125 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 0.03127090713297698, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.03127090713297698 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.267741935483871, - "acc_stderr": 0.025189006660212385, - "acc_norm": 0.267741935483871, - "acc_norm_stderr": 0.025189006660212385 - }, - 
"harness|ko_mmlu_marketing|5": { - "acc": 0.3076923076923077, - "acc_stderr": 0.030236389942173106, - "acc_norm": 0.3076923076923077, - "acc_norm_stderr": 0.030236389942173106 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.041723430387053825, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.041723430387053825 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712166, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712166 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.036586032627637426, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.036586032627637426 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3482587064676617, - "acc_stderr": 0.033687874661154596, - "acc_norm": 0.3482587064676617, - "acc_norm_stderr": 0.033687874661154596 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.022261817692400175, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.022261817692400175 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.024685316867257806, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.024685316867257806 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.036230899157241474, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.036230899157241474 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.02563082497562135, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.02563082497562135 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24352331606217617, - "acc_stderr": 0.030975436386845436, - "acc_norm": 0.24352331606217617, - "acc_norm_stderr": 0.030975436386845436 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29541284403669726, - "acc_stderr": 0.019560619182976, - "acc_norm": 0.29541284403669726, - "acc_norm_stderr": 0.019560619182976 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.14285714285714285, - "acc_stderr": 0.0312984318574381, - "acc_norm": 0.14285714285714285, - "acc_norm_stderr": 0.0312984318574381 - }, - 
"harness|ko_mmlu_nutrition|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.026336613469046644, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.026336613469046644 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3022875816993464, - "acc_stderr": 0.018579232711113877, - "acc_norm": 0.3022875816993464, - "acc_norm_stderr": 0.018579232711113877 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2907801418439716, - "acc_stderr": 0.027090664368353178, - "acc_norm": 0.2907801418439716, - "acc_norm_stderr": 0.027090664368353178 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.26785714285714285, - "acc_stderr": 0.0420327729146776, - "acc_norm": 0.26785714285714285, - "acc_norm_stderr": 0.0420327729146776 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.02746740180405799, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.02746740180405799 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.17279411764705882, - "acc_stderr": 0.022966067585581788, - "acc_norm": 0.17279411764705882, - "acc_norm_stderr": 0.022966067585581788 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2163265306122449, - "acc_stderr": 0.026358916334904028, - "acc_norm": 0.2163265306122449, - "acc_norm_stderr": 0.026358916334904028 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3080168776371308, - "acc_stderr": 0.030052389335605695, - "acc_norm": 0.3080168776371308, - "acc_norm_stderr": 0.030052389335605695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771312, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771312 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.03213325717373618, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.03213325717373618 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031595, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031595 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23745410036719705, - "mc1_stderr": 0.014896277441041867, - "mc2": 0.3946101299678252, - "mc2_stderr": 0.01496139592173614 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4061032863849765, - "acc_stderr": 0.016834837668044094, - "acc_norm": 0.4460093896713615, - "acc_norm_stderr": 0.017039561832563683 - } - }, - 
"versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/llama-2-ko-7b-chat", - "model_sha": "98096a3f4d095e42ba10daec38ad329d9576f4cd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json b/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json deleted file mode 100644 index 533db025ccde6c34d3071a6eadf08efbec465ab0..0000000000000000000000000000000000000000 --- a/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2295221843003413, - "acc_stderr": 0.012288926760890793, - "acc_norm": 0.27559726962457337, - "acc_norm_stderr": 
0.013057169655761838 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3253335988846843, - "acc_stderr": 0.004675418774314241, - "acc_norm": 0.3995220075682135, - "acc_norm_stderr": 0.004887991225950282 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.03424042924691583, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.03424042924691583 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161551, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161551 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2784163473818646, - "acc_stderr": 0.016028295188992455, - "acc_norm": 0.2784163473818646, - "acc_norm_stderr": 0.016028295188992455 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03591444084196969, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03591444084196969 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036843, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036843 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838752, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838752 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.031417842916639245, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.031417842916639245 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24758842443729903, - "acc_stderr": 0.024513879973621967, - "acc_norm": 0.24758842443729903, - "acc_norm_stderr": 0.024513879973621967 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21076233183856502, - "acc_stderr": 0.027373095500540193, - "acc_norm": 0.21076233183856502, - "acc_norm_stderr": 0.027373095500540193 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.21374045801526717, - "acc_stderr": 0.035954616117746904, - "acc_norm": 0.21374045801526717, - "acc_norm_stderr": 0.035954616117746904 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124498, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124498 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237656, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237656 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.03068473711513537, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.03068473711513537 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.02345467488940429, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.02345467488940429 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 
0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.031618563353586114, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.031618563353586114 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3, - "acc_stderr": 0.026069362295335137, - "acc_norm": 0.3, - "acc_norm_stderr": 0.026069362295335137 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.027236013946196687, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.027236013946196687 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241235, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241235 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.041723430387053825, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.041723430387053825 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.028133252578815635, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.028133252578815635 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.27860696517412936, - "acc_stderr": 0.031700561834973086, - "acc_norm": 0.27860696517412936, - "acc_norm_stderr": 0.031700561834973086 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.03496101481191179, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.03496101481191179 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2724867724867725, - "acc_stderr": 0.022930973071633345, - "acc_norm": 0.2724867724867725, - "acc_norm_stderr": 0.022930973071633345 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.21098265895953758, - "acc_stderr": 0.021966309947043117, - "acc_norm": 0.21098265895953758, - "acc_norm_stderr": 0.021966309947043117 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25308641975308643, - "acc_stderr": 0.024191808600713002, - "acc_norm": 0.25308641975308643, - "acc_norm_stderr": 0.024191808600713002 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 
0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.20550458715596331, - "acc_stderr": 0.017324352325016015, - "acc_norm": 0.20550458715596331, - "acc_norm_stderr": 0.017324352325016015 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.0361960452412425, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.0361960452412425 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.02526169121972948, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.02526169121972948 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.039427724440366234, - "acc_norm": 0.19, - "acc_norm_stderr": 0.039427724440366234 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2644628099173554, - "acc_stderr": 0.04026187527591206, - "acc_norm": 0.2644628099173554, - "acc_norm_stderr": 0.04026187527591206 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.03064360707167709, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.03064360707167709 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.20261437908496732, - "acc_stderr": 0.01626105528374612, - "acc_norm": 0.20261437908496732, - "acc_norm_stderr": 0.01626105528374612 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.027374128882631157, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.027374128882631157 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.03952301967702511, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.03952301967702511 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.03388857118502325, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.03388857118502325 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2737430167597765, - "acc_stderr": 0.014912413096372432, - "acc_norm": 0.2737430167597765, - "acc_norm_stderr": 0.014912413096372432 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.02993534270787775, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.02993534270787775 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.20816326530612245, - "acc_stderr": 0.025991117672813292, - "acc_norm": 0.20816326530612245, - "acc_norm_stderr": 0.025991117672813292 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994927, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994927 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2379400260756193, - "acc_stderr": 0.010875700787694243, - "acc_norm": 0.2379400260756193, - "acc_norm_stderr": 0.010875700787694243 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.030190282453501933, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.030190282453501933 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 
0.2606060606060606, - "acc_stderr": 0.034277431758165236, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.034277431758165236 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.014948812679062137, - "mc2": 0.4105215346532836, - "mc2_stderr": 0.015140606421446082 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2147887323943662, - "acc_stderr": 0.014077781780936452, - "acc_norm": 0.2852112676056338, - "acc_norm_stderr": 0.015477735058269455 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/polyglot-ko-1.3b-chat", - "model_sha": "156656e44a70bc0905777f682f16237758d16b16", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git 
a/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json b/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json deleted file mode 100644 index 223d56aad37331fe6793ff993384cd97f212b93d..0000000000000000000000000000000000000000 --- a/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2568259385665529, - "acc_stderr": 0.012766923794116801, - "acc_norm": 0.30887372013651876, - "acc_norm_stderr": 0.013501770929344003 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35172276438956385, - "acc_stderr": 0.004765320784902128, - "acc_norm": 0.4396534554869548, - "acc_norm_stderr": 0.004953305461311753 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393161, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393161 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.04185832598928315, - "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.04185832598928315 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2656449553001277, - "acc_stderr": 0.01579430248788873, - "acc_norm": 0.2656449553001277, - "acc_norm_stderr": 0.01579430248788873 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.03749850709174022, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.03749850709174022 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.02655698211783875, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.02655698211783875 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2289156626506024, - "acc_stderr": 0.03270745277352477, - "acc_norm": 0.2289156626506024, - "acc_norm_stderr": 0.03270745277352477 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.27009646302250806, - "acc_stderr": 0.025218040373410622, - "acc_norm": 0.27009646302250806, - "acc_norm_stderr": 0.025218040373410622 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.027157150479563824, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.027157150479563824 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23232323232323232, - "acc_stderr": 0.030088629490217487, - "acc_norm": 0.23232323232323232, - "acc_norm_stderr": 0.030088629490217487 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.03831226048850333, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.03831226048850333 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24369747899159663, - "acc_stderr": 0.027886828078380558, - "acc_norm": 0.24369747899159663, - "acc_norm_stderr": 0.027886828078380558 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - 
"acc": 0.2717948717948718, - "acc_stderr": 0.022556551010132354, - "acc_norm": 0.2717948717948718, - "acc_norm_stderr": 0.022556551010132354 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.26129032258064516, - "acc_stderr": 0.024993053397764833, - "acc_norm": 0.26129032258064516, - "acc_norm_stderr": 0.024993053397764833 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23931623931623933, - "acc_stderr": 0.027951826808924333, - "acc_norm": 0.23931623931623933, - "acc_norm_stderr": 0.027951826808924333 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22641509433962265, - "acc_stderr": 0.025757559893106734, - "acc_norm": 0.22641509433962265, - "acc_norm_stderr": 0.025757559893106734 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.17272727272727273, - "acc_stderr": 0.03620691833929217, - "acc_norm": 0.17272727272727273, - "acc_norm_stderr": 0.03620691833929217 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145668, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145668 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.25165562913907286, - "acc_stderr": 0.03543304234389985, - "acc_norm": 0.25165562913907286, - "acc_norm_stderr": 0.03543304234389985 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.03076944496729602, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.03076944496729602 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23121387283236994, - "acc_stderr": 0.03214737302029468, - "acc_norm": 0.23121387283236994, - "acc_norm_stderr": 0.03214737302029468 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708607, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708607 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566016, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566016 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2630057803468208, - "acc_stderr": 0.023703099525258158, - "acc_norm": 0.2630057803468208, - "acc_norm_stderr": 0.023703099525258158 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2777777777777778, - "acc_stderr": 
0.024922001168886338, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.024922001168886338 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.34196891191709844, - "acc_stderr": 0.03423465100104283, - "acc_norm": 0.34196891191709844, - "acc_norm_stderr": 0.03423465100104283 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24220183486238533, - "acc_stderr": 0.01836817630659862, - "acc_norm": 0.24220183486238533, - "acc_norm_stderr": 0.01836817630659862 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235173, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235173 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.02473998135511359, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.02473998135511359 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624504, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624504 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.03715062154998904, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.03715062154998904 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.26633986928104575, - "acc_stderr": 0.017883188134667192, - "acc_norm": 0.26633986928104575, - "acc_norm_stderr": 0.017883188134667192 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307854, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307854 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755808, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755808 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.03324708911809117, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.03324708911809117 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.26838235294117646, - "acc_stderr": 0.02691748122437723, - "acc_norm": 0.26838235294117646, - "acc_norm_stderr": 0.02691748122437723 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2979591836734694, - "acc_stderr": 0.029279567411065684, - "acc_norm": 0.2979591836734694, - "acc_norm_stderr": 0.029279567411065684 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 
0.028756799629658332, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658332 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25945241199478486, - "acc_stderr": 0.011195262076350309, - "acc_norm": 0.25945241199478486, - "acc_norm_stderr": 0.011195262076350309 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591361, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591361 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03453131801885416, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03453131801885416 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26438188494492043, - "mc1_stderr": 0.015438211119522517, - "mc2": 0.42818983286182555, - "mc2_stderr": 0.015309048799107149 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.11619718309859155, - "acc_stderr": 0.010985269366452478, - "acc_norm": 0.1596244131455399, - "acc_norm_stderr": 0.012555140888040404 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/polyglot-ko-3.8b-chat", - "model_sha": "0e8739e22d15d44f6196fb281895856a0372564a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json b/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json deleted file mode 100644 index a9f69df93fd192124726ead42264a1ef9dae3e9a..0000000000000000000000000000000000000000 --- a/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2815699658703072, - "acc_stderr": 0.013143376735009007, - "acc_norm": 0.3165529010238908, - "acc_norm_stderr": 0.01359243151906808 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35899223262298346, - "acc_stderr": 0.004787245377967104, - "acc_norm": 0.4522007568213503, - "acc_norm_stderr": 0.004966928094797578 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.23391812865497075, - "acc_stderr": 0.03246721765117827, - "acc_norm": 0.23391812865497075, - "acc_norm_stderr": 0.03246721765117827 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1553398058252427, - "acc_stderr": 0.03586594738573974, - "acc_norm": 0.1553398058252427, - "acc_norm_stderr": 0.03586594738573974 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.29757343550446996, - "acc_stderr": 0.016349111912909418, - "acc_norm": 0.29757343550446996, - "acc_norm_stderr": 0.016349111912909418 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2074074074074074, - "acc_stderr": 0.03502553170678316, - "acc_norm": 0.2074074074074074, - "acc_norm_stderr": 0.03502553170678316 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3021276595744681, - "acc_stderr": 0.030017554471880557, - "acc_norm": 0.3021276595744681, - "acc_norm_stderr": 0.030017554471880557 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26366559485530544, - "acc_stderr": 0.02502553850053234, - "acc_norm": 0.26366559485530544, - "acc_norm_stderr": 0.02502553850053234 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.03210062154134988, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.03210062154134988 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23232323232323232, - "acc_stderr": 0.03008862949021749, - "acc_norm": 0.23232323232323232, - "acc_norm_stderr": 0.03008862949021749 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 
0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29831932773109243, - "acc_stderr": 0.029719142876342853, - "acc_norm": 0.29831932773109243, - "acc_norm_stderr": 0.029719142876342853 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.32564102564102565, - "acc_stderr": 0.02375966576741229, - "acc_norm": 0.32564102564102565, - "acc_norm_stderr": 0.02375966576741229 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243839, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243839 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.030108330718011625, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.030108330718011625 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553883, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553883 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.02860595370200425, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.02860595370200425 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2943396226415094, - "acc_stderr": 0.028049186315695245, - "acc_norm": 0.2943396226415094, - "acc_norm_stderr": 0.028049186315695245 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2, - "acc_stderr": 0.038313051408846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.038313051408846 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2851851851851852, - "acc_stderr": 0.027528599210340492, - "acc_norm": 0.2851851851851852, - "acc_norm_stderr": 0.027528599210340492 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.03374235550425694, - "acc_norm": 0.2185430463576159, - "acc_norm_stderr": 0.03374235550425694 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.27860696517412936, - "acc_stderr": 0.031700561834973086, - "acc_norm": 0.27860696517412936, - "acc_norm_stderr": 0.031700561834973086 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2023121387283237, - "acc_stderr": 0.030631145539198823, - "acc_norm": 0.2023121387283237, - "acc_norm_stderr": 0.030631145539198823 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.022860838309232072, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.022860838309232072 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 
0.04512608598542127 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.20520231213872833, - "acc_stderr": 0.021742519835276284, - "acc_norm": 0.20520231213872833, - "acc_norm_stderr": 0.021742519835276284 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25153374233128833, - "acc_stderr": 0.03408997886857529, - "acc_norm": 0.25153374233128833, - "acc_norm_stderr": 0.03408997886857529 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.025407197798890162, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.025407197798890162 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.26424870466321243, - "acc_stderr": 0.03182155050916646, - "acc_norm": 0.26424870466321243, - "acc_norm_stderr": 0.03182155050916646 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26055045871559634, - "acc_stderr": 0.01881918203485007, - "acc_norm": 0.26055045871559634, - "acc_norm_stderr": 0.01881918203485007 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.03764950879790606, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.03764950879790606 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.238562091503268, - "acc_stderr": 0.024404394928087873, - "acc_norm": 0.238562091503268, - "acc_norm_stderr": 0.024404394928087873 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.256198347107438, - "acc_stderr": 0.039849796533028704, - "acc_norm": 0.256198347107438, - "acc_norm_stderr": 0.039849796533028704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.01690661592728815, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.01690661592728815 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.026011992930902013, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.026011992930902013 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.044939490686135404, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.044939490686135404 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.03256850570293646, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.03256850570293646 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 0.014288343803925295, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.014288343803925295 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - 
"harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2867647058823529, - "acc_stderr": 0.02747227447323382, - "acc_norm": 0.2867647058823529, - "acc_norm_stderr": 0.02747227447323382 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3551020408163265, - "acc_stderr": 0.03063565515038764, - "acc_norm": 0.3551020408163265, - "acc_norm_stderr": 0.03063565515038764 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29957805907172996, - "acc_stderr": 0.029818024749753102, - "acc_norm": 0.29957805907172996, - "acc_norm_stderr": 0.029818024749753102 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25488917861799215, - "acc_stderr": 0.011130509812662979, - "acc_norm": 0.25488917861799215, - "acc_norm_stderr": 0.011130509812662979 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693285, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693285 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.0347769116216366, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.0347769116216366 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25458996328029376, - "mc1_stderr": 0.015250117079156475, - "mc2": 0.4027649410811347, - "mc2_stderr": 0.014993381048704797 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3286384976525822, - "acc_stderr": 0.016101734275119243, - "acc_norm": 0.3908450704225352, - "acc_norm_stderr": 0.016726359269640344 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "heegyu/polyglot-ko-5.8b-chat", - "model_sha": "58d274dbd13bd1829a6bd17d90c493bd9039564f", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/huggyllama/llama-13b/result_2023-09-27 04:58:53.json b/huggyllama/llama-13b/result_2023-09-27 04:58:53.json deleted file mode 100644 index c4fcffff6534fac0d9fb28a1f79d7a469bd77636..0000000000000000000000000000000000000000 --- a/huggyllama/llama-13b/result_2023-09-27 04:58:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2098976109215017, - "acc_stderr": 0.01190054874804745, - "acc_norm": 0.2593856655290102, - "acc_norm_stderr": 0.012808273573927092 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3014339772953595, - "acc_stderr": 0.004579429184835869, - "acc_norm": 0.3571001792471619, - "acc_norm_stderr": 0.004781654610857135 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.036155076303109344, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.036155076303109344 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.04453254836326467, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.04453254836326467 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3065134099616858, - "acc_stderr": 0.016486952893041515, - "acc_norm": 0.3065134099616858, - "acc_norm_stderr": 0.016486952893041515 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.03885004245800254, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.03885004245800254 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3148936170212766, - "acc_stderr": 0.03036358219723816, - "acc_norm": 0.3148936170212766, - "acc_norm_stderr": 0.03036358219723816 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370519, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370519 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3536977491961415, - "acc_stderr": 0.02715520810320086, - "acc_norm": 0.3536977491961415, - "acc_norm_stderr": 0.02715520810320086 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.27802690582959644, - "acc_stderr": 0.03006958487449405, - "acc_norm": 0.27802690582959644, - "acc_norm_stderr": 0.03006958487449405 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.31297709923664124, - "acc_stderr": 0.04066962905677697, - "acc_norm": 0.31297709923664124, - "acc_norm_stderr": 0.04066962905677697 
- }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29797979797979796, - "acc_stderr": 0.03258630383836556, - "acc_norm": 0.29797979797979796, - "acc_norm_stderr": 0.03258630383836556 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.03921545312467121, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.03921545312467121 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.030388353551886838, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.030388353551886838 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.33076923076923076, - "acc_stderr": 0.0238547956809711, - "acc_norm": 0.33076923076923076, - "acc_norm_stderr": 0.0238547956809711 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.04732332615978814, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.04732332615978814 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.031089826002937523, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.031089826002937523 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.33548387096774196, - "acc_stderr": 0.02686020644472434, - "acc_norm": 0.33548387096774196, - "acc_norm_stderr": 0.02686020644472434 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.46153846153846156, - "acc_stderr": 0.03265903381186195, - "acc_norm": 0.46153846153846156, - "acc_norm_stderr": 0.03265903381186195 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670716, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670716 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3, - "acc_stderr": 0.04389311454644286, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04389311454644286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.21851851851851853, - "acc_stderr": 0.025195752251823793, - "acc_norm": 0.21851851851851853, - "acc_norm_stderr": 0.025195752251823793 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3283582089552239, - "acc_stderr": 0.033206858897443244, - "acc_norm": 0.3283582089552239, - "acc_norm_stderr": 0.033206858897443244 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.03391750322321659, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.03391750322321659 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23544973544973544, - "acc_stderr": 0.021851509822031705, - "acc_norm": 0.23544973544973544, - "acc_norm_stderr": 0.021851509822031705 - }, - 
"harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.025722802200895817, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.025722802200895817 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.025702640260603753, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.025702640260603753 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089117, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089117 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29724770642201837, - "acc_stderr": 0.019595707224643533, - "acc_norm": 0.29724770642201837, - "acc_norm_stderr": 0.019595707224643533 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.042163702135578345, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.042163702135578345 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.02778014120702335, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.02778014120702335 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.04545454545454546, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.04545454545454546 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.03690677986137282, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.03690677986137282 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2826797385620915, - "acc_stderr": 0.018217269552053446, - "acc_norm": 0.2826797385620915, - "acc_norm_stderr": 0.018217269552053446 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880585, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880585 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.032036140846700596, - "acc_norm": 0.3287037037037037, - "acc_norm_stderr": 0.032036140846700596 - }, - "harness|ko_mmlu_moral_scenarios|5": { - 
"acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095278, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095278 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2536764705882353, - "acc_stderr": 0.026431329870789513, - "acc_norm": 0.2536764705882353, - "acc_norm_stderr": 0.026431329870789513 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.44081632653061226, - "acc_stderr": 0.03178419114175363, - "acc_norm": 0.44081632653061226, - "acc_norm_stderr": 0.03178419114175363 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29535864978902954, - "acc_stderr": 0.029696338713422893, - "acc_norm": 0.29535864978902954, - "acc_norm_stderr": 0.029696338713422893 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771312, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771312 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.031980016601150706, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.031980016601150706 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.03588624800091709, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.03588624800091709 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.01555077833284288, - "mc2": 0.43560981343267496, - "mc2_stderr": 0.01587676917939091 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3204225352112676, - "acc_stderr": 0.015996178088626942, - "acc_norm": 0.37089201877934275, - "acc_norm_stderr": 0.016558521692487352 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "huggyllama/llama-13b", - "model_sha": "bf57045473f207bb1de1ed035ace226f4d9f9bba", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/huggyllama/llama-7b/result_2023-09-28 00:26:14.json b/huggyllama/llama-7b/result_2023-09-28 00:26:14.json deleted file mode 100644 index 71a7212e1b6906f17bb2e975086f1237340359f3..0000000000000000000000000000000000000000 --- a/huggyllama/llama-7b/result_2023-09-28 00:26:14.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2175767918088737, - "acc_stderr": 0.012057262020972508, - "acc_norm": 0.2525597269624573, - "acc_norm_stderr": 0.012696728980207708 - }, - "harness|ko_hellaswag|10": { - "acc": 0.29197371041625175, - "acc_stderr": 0.004537410615572941, - "acc_norm": 0.3343955387373033, - "acc_norm_stderr": 0.004708145393411397 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.0330140594698725, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.0330140594698725 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.21359223300970873, - "acc_stderr": 0.040580420156460344, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.040580420156460344 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2554278416347382, - "acc_stderr": 0.01559495538445577, - "acc_norm": 0.2554278416347382, - "acc_norm_stderr": 0.01559495538445577 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.0391545063041425, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.0391545063041425 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.02895734278834235, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.02895734278834235 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.0368078369072758, - "acc_norm": 0.3373493975903614, - "acc_norm_stderr": 0.0368078369072758 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2347266881028939, - "acc_stderr": 
0.024071805887677048, - "acc_norm": 0.2347266881028939, - "acc_norm_stderr": 0.024071805887677048 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19282511210762332, - "acc_stderr": 0.026478240960489365, - "acc_norm": 0.19282511210762332, - "acc_norm_stderr": 0.026478240960489365 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.038808483010823944, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.038808483010823944 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.033948539651564025, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.033948539651564025 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.23448275862068965, - "acc_stderr": 0.035306258743465914, - "acc_norm": 0.23448275862068965, - "acc_norm_stderr": 0.035306258743465914 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307811, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307811 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23949579831932774, - "acc_stderr": 0.027722065493361266, - "acc_norm": 0.23949579831932774, - "acc_norm_stderr": 0.027722065493361266 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23846153846153847, - "acc_stderr": 0.021606294494647727, - "acc_norm": 0.23846153846153847, - "acc_norm_stderr": 0.021606294494647727 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.04524596007030049, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.04524596007030049 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.0317852971064275, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.0317852971064275 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.27419354838709675, - "acc_stderr": 0.025378139970885193, - "acc_norm": 0.27419354838709675, - "acc_norm_stderr": 0.025378139970885193 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2948717948717949, - "acc_stderr": 0.029872577708891172, - "acc_norm": 0.2948717948717949, - "acc_norm_stderr": 0.029872577708891172 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.025497532639609546, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.025497532639609546 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.036313298039696525, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.036313298039696525 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 
0.030567675938916718, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916718 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708607, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708607 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.024257901705323374, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.024257901705323374 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.31901840490797545, - "acc_stderr": 0.03661997551073836, - "acc_norm": 0.31901840490797545, - "acc_norm_stderr": 0.03661997551073836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.29012345679012347, - "acc_stderr": 0.025251173936495033, - "acc_norm": 0.29012345679012347, - "acc_norm_stderr": 0.025251173936495033 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23834196891191708, - "acc_stderr": 0.030748905363909906, - "acc_norm": 0.23834196891191708, - "acc_norm_stderr": 0.030748905363909906 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537316, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537316 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.326605504587156, - "acc_stderr": 0.020106990889937303, - "acc_norm": 0.326605504587156, - "acc_norm_stderr": 0.020106990889937303 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.036196045241242494, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.036196045241242494 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2908496732026144, - "acc_stderr": 0.026004800363952113, - "acc_norm": 0.2908496732026144, - "acc_norm_stderr": 0.026004800363952113 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4049586776859504, - "acc_stderr": 0.044811377559424694, - "acc_norm": 0.4049586776859504, - "acc_norm_stderr": 0.044811377559424694 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.018718067052623227, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.018718067052623227 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180844, - "acc_norm": 
0.25886524822695034, - "acc_norm_stderr": 0.026129572527180844 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16964285714285715, - "acc_stderr": 0.0356236785009539, - "acc_norm": 0.16964285714285715, - "acc_norm_stderr": 0.0356236785009539 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4212962962962963, - "acc_stderr": 0.03367462138896078, - "acc_norm": 0.4212962962962963, - "acc_norm_stderr": 0.03367462138896078 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22426470588235295, - "acc_stderr": 0.025336848563332372, - "acc_norm": 0.22426470588235295, - "acc_norm_stderr": 0.025336848563332372 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788163, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788163 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.27848101265822783, - "acc_stderr": 0.029178682304842555, - "acc_norm": 0.27848101265822783, - "acc_norm_stderr": 0.029178682304842555 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2542372881355932, - "acc_stderr": 0.011121129007840685, - "acc_norm": 0.2542372881355932, - "acc_norm_stderr": 0.011121129007840685 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23039215686274508, - "acc_stderr": 0.02955429260569506, - "acc_norm": 0.23039215686274508, - "acc_norm_stderr": 0.02955429260569506 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.03346409881055953, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.03346409881055953 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2668298653610771, - "mc1_stderr": 0.015483691939237272, - "mc2": 0.4405577919486417, - "mc2_stderr": 0.01601590664012013 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.12910798122065728, - "acc_stderr": 0.011494601522741265, - "acc_norm": 0.16901408450704225, - "acc_norm_stderr": 0.012846756724465036 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - 
"harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "huggyllama/llama-7b", - "model_sha": "8416d3fefb0cb3ff5775a7b13c1692d10ff1aa16", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json b/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json deleted file mode 100644 index bc7eca7fe05a9d06714b9c27a732201341186889..0000000000000000000000000000000000000000 --- a/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3660409556313993, - "acc_stderr": 0.01407722310847014, - "acc_norm": 0.42150170648464164, - "acc_norm_stderr": 0.014430197069326021 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4063931487751444, - "acc_stderr": 0.004901558132335531, - "acc_norm": 0.5423222465644294, - "acc_norm_stderr": 0.004971874159777693 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5672514619883041, - "acc_stderr": 0.03799978644370607, - "acc_norm": 0.5672514619883041, - "acc_norm_stderr": 0.03799978644370607 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4827586206896552, - "acc_stderr": 0.017869330154003705, - "acc_norm": 0.4827586206896552, - "acc_norm_stderr": 0.017869330154003705 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, 
- "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3702127659574468, - "acc_stderr": 0.03156564682236784, - "acc_norm": 0.3702127659574468, - "acc_norm_stderr": 0.03156564682236784 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.038284011150790206, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.038284011150790206 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.47266881028938906, - "acc_stderr": 0.02835563356832818, - "acc_norm": 0.47266881028938906, - "acc_norm_stderr": 0.02835563356832818 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484503, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484503 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.035578062450873145, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.035578062450873145 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.032104790510157764, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.032104790510157764 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4, - "acc_stderr": 0.02483881198803315, - "acc_norm": 0.4, - "acc_norm_stderr": 0.02483881198803315 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.0478034362693679, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.0478034362693679 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998575, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.41935483870967744, - "acc_stderr": 0.028071588901091852, - "acc_norm": 0.41935483870967744, - "acc_norm_stderr": 0.028071588901091852 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6196581196581197, - "acc_stderr": 0.03180425204384099, - "acc_norm": 0.6196581196581197, - "acc_norm_stderr": 0.03180425204384099 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4641509433962264, - "acc_stderr": 0.030693675018458003, - "acc_norm": 0.4641509433962264, - "acc_norm_stderr": 0.030693675018458003 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - 
"acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145647, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145647 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5124378109452736, - "acc_stderr": 0.03534439848539579, - "acc_norm": 0.5124378109452736, - "acc_norm_stderr": 0.03534439848539579 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.03614665424180826, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.03614665424180826 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.022860838309232072, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.022860838309232072 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956913, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956913 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4190751445086705, - "acc_stderr": 0.02656417811142262, - "acc_norm": 0.4190751445086705, - "acc_norm_stderr": 0.02656417811142262 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.02771666165019404, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.02771666165019404 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46972477064220186, - "acc_stderr": 0.021397988604936965, - "acc_norm": 0.46972477064220186, - "acc_norm_stderr": 0.021397988604936965 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.42810457516339867, - "acc_stderr": 0.028332397483664274, - "acc_norm": 0.42810457516339867, - "acc_norm_stderr": 0.028332397483664274 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5619834710743802, - "acc_stderr": 0.04529146804435792, - "acc_norm": 0.5619834710743802, - "acc_norm_stderr": 0.04529146804435792 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.0403356566784832, - 
"acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.0403356566784832 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.018550634502952964, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.018550634502952964 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.027640120545169938, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.027640120545169938 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2610294117647059, - "acc_stderr": 0.02667925227010311, - "acc_norm": 0.2610294117647059, - "acc_norm_stderr": 0.02667925227010311 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46122448979591835, - "acc_stderr": 0.03191282052669277, - "acc_norm": 0.46122448979591835, - "acc_norm_stderr": 0.03191282052669277 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3005215123859192, - "acc_stderr": 0.011709918883039119, - "acc_norm": 0.3005215123859192, - "acc_norm_stderr": 0.011709918883039119 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4484848484848485, - "acc_stderr": 0.03883565977956929, - "acc_norm": 0.4484848484848485, - "acc_norm_stderr": 0.03883565977956929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.40740955216969593, - "mc2_stderr": 0.01489940591651966 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.47417840375586856, - "acc_stderr": 0.017116907933735916, - "acc_norm": 0.573943661971831, - "acc_norm_stderr": 0.016951313945591816 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - 
"harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "hyunseoki/ko-en-llama2-13b", - "model_sha": "9347de85c1b640502a320973279f249c8046c450", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json b/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json deleted file mode 100644 index c1e2a01d6157ddd1601401ae68202b3b31083b7f..0000000000000000000000000000000000000000 --- a/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.37457337883959047, - "acc_stderr": 0.014144193471893456, - "acc_norm": 0.43600682593856654, - "acc_norm_stderr": 0.014491225699230916 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3990240987851026, - "acc_stderr": 0.004886969266944274, - "acc_norm": 0.5257916749651463, - "acc_norm_stderr": 0.00498313847960438 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2621359223300971, - "acc_stderr": 0.04354631077260595, - "acc_norm": 0.2621359223300971, - "acc_norm_stderr": 0.04354631077260595 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.40229885057471265, - 
"acc_stderr": 0.01753529452906895, - "acc_norm": 0.40229885057471265, - "acc_norm_stderr": 0.01753529452906895 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843231008, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843231008 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.03460579907553026, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.03460579907553026 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3440514469453376, - "acc_stderr": 0.026981478043648022, - "acc_norm": 0.3440514469453376, - "acc_norm_stderr": 0.026981478043648022 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.38565022421524664, - "acc_stderr": 0.03266842214289201, - "acc_norm": 0.38565022421524664, - "acc_norm_stderr": 0.03266842214289201 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.31297709923664124, - "acc_stderr": 0.04066962905677697, - "acc_norm": 0.31297709923664124, - "acc_norm_stderr": 0.04066962905677697 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3282828282828283, - "acc_stderr": 0.03345678422756777, - "acc_norm": 0.3282828282828283, - "acc_norm_stderr": 0.03345678422756777 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378948, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378948 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307811, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307811 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.029597329730978103, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.029597329730978103 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2717948717948718, - "acc_stderr": 0.022556551010132354, - "acc_norm": 0.2717948717948718, - "acc_norm_stderr": 0.022556551010132354 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.046166311118017125, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.046166311118017125 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.03178529710642749, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.03178529710642749 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3096774193548387, - "acc_stderr": 0.026302774983517418, - "acc_norm": 0.3096774193548387, - "acc_norm_stderr": 0.026302774983517418 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.41452991452991456, - "acc_stderr": 0.03227396567623778, - "acc_norm": 0.41452991452991456, - 
"acc_norm_stderr": 0.03227396567623778 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2943396226415094, - "acc_stderr": 0.028049186315695238, - "acc_norm": 0.2943396226415094, - "acc_norm_stderr": 0.028049186315695238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.04265792110940588, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.04265792110940588 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085622, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085622 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.373134328358209, - "acc_stderr": 0.034198326081760065, - "acc_norm": 0.373134328358209, - "acc_norm_stderr": 0.034198326081760065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463084, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463084 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.025722802200895803, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.025722802200895803 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.026041766202717163, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.026041766202717163 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2694300518134715, - "acc_stderr": 0.032018671228777947, - "acc_norm": 0.2694300518134715, - "acc_norm_stderr": 0.032018671228777947 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.03712454853721368, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.03712454853721368 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.326605504587156, - "acc_stderr": 0.020106990889937306, - "acc_norm": 0.326605504587156, - "acc_norm_stderr": 0.020106990889937306 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.11904761904761904, - "acc_stderr": 0.028965535858562975, - "acc_norm": 0.11904761904761904, - "acc_norm_stderr": 0.028965535858562975 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.026336613469046626, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 
0.026336613469046626 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4380165289256198, - "acc_stderr": 0.045291468044357915, - "acc_norm": 0.4380165289256198, - "acc_norm_stderr": 0.045291468044357915 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.03715062154998905, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.03715062154998905 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.018403415710109783, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.018403415710109783 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.0258921511567094, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.0258921511567094 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755808, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755808 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.028765111718046937, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.028765111718046937 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.18382352941176472, - "acc_stderr": 0.02352924218519311, - "acc_norm": 0.18382352941176472, - "acc_norm_stderr": 0.02352924218519311 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2897959183673469, - "acc_stderr": 0.02904308868330433, - "acc_norm": 0.2897959183673469, - "acc_norm_stderr": 0.02904308868330433 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3881856540084388, - "acc_stderr": 0.03172295004332329, - "acc_norm": 0.3881856540084388, - "acc_norm_stderr": 0.03172295004332329 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2985658409387223, - "acc_stderr": 0.011688060141794228, - "acc_norm": 0.2985658409387223, - "acc_norm_stderr": 0.011688060141794228 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03308611113236436, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03308611113236436 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.37575757575757573, - "acc_stderr": 0.037818873532059816, - "acc_norm": 0.37575757575757573, - "acc_norm_stderr": 0.037818873532059816 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25091799265605874, - "mc1_stderr": 0.015176985027707679, - "mc2": 0.4089327594647445, - "mc2_stderr": 0.01512159542972759 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.43309859154929575, - "acc_stderr": 0.016985657928418076, - "acc_norm": 0.5504694835680751, - "acc_norm_stderr": 0.017052239885414475 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - 
"harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "hyunseoki/ko-ref-llama2-13b", - "model_sha": "c5d09631c88ab5012b48187ecd90ae773cd4bbd9", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json b/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json deleted file mode 100644 index 80a3db8fb6016356af1491e45341a469255503cd..0000000000000000000000000000000000000000 --- a/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.33447098976109213, - "acc_stderr": 0.013787460322441387, - "acc_norm": 0.3848122866894198, - "acc_norm_stderr": 0.0142183710652511 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3836885082652858, - "acc_stderr": 0.0048528966817367606, - "acc_norm": 0.4970125473013344, - 
"acc_norm_stderr": 0.004989692344313999 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.29239766081871343, - "acc_stderr": 0.03488647713457922, - "acc_norm": 0.29239766081871343, - "acc_norm_stderr": 0.03488647713457922 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.21359223300970873, - "acc_stderr": 0.04058042015646034, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.04058042015646034 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.31417624521072796, - "acc_stderr": 0.01659929173588491, - "acc_norm": 0.31417624521072796, - "acc_norm_stderr": 0.01659929173588491 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.031245325202761926, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.031245325202761926 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370519, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370519 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2958199356913183, - "acc_stderr": 0.025922371788818784, - "acc_norm": 0.2958199356913183, - "acc_norm_stderr": 0.025922371788818784 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.30493273542600896, - "acc_stderr": 0.030898610882477515, - "acc_norm": 0.30493273542600896, - "acc_norm_stderr": 0.030898610882477515 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29797979797979796, - "acc_stderr": 0.03258630383836554, - "acc_norm": 0.29797979797979796, - "acc_norm_stderr": 0.03258630383836554 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3724137931034483, - "acc_stderr": 0.0402873153294756, - "acc_norm": 0.3724137931034483, - "acc_norm_stderr": 0.0402873153294756 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.04336432707993179, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.04336432707993179 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.02136202772522273, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.02136202772522273 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252626, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252626 - }, - 
"harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.030108330718011625, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.030108330718011625 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466777, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466777 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.031937057262002924, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.031937057262002924 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22264150943396227, - "acc_stderr": 0.02560423347089909, - "acc_norm": 0.22264150943396227, - "acc_norm_stderr": 0.02560423347089909 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.31840796019900497, - "acc_stderr": 0.032941184790540944, - "acc_norm": 0.31840796019900497, - "acc_norm_stderr": 0.032941184790540944 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047876, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047876 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2804232804232804, - "acc_stderr": 0.02313528797432563, - "acc_norm": 0.2804232804232804, - "acc_norm_stderr": 0.02313528797432563 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.30346820809248554, - "acc_stderr": 0.02475241196091721, - "acc_norm": 0.30346820809248554, - "acc_norm_stderr": 0.02475241196091721 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3117283950617284, - "acc_stderr": 0.025773111169630443, - "acc_norm": 0.3117283950617284, - "acc_norm_stderr": 0.025773111169630443 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - 
"harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24403669724770644, - "acc_stderr": 0.018415286351416416, - "acc_norm": 0.24403669724770644, - "acc_norm_stderr": 0.018415286351416416 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03718489006818114, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03718489006818114 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.02649303322514589, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.02649303322514589 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.044120158066245044, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.044120158066245044 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316091, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316091 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.26143790849673204, - "acc_stderr": 0.017776947157528037, - "acc_norm": 0.26143790849673204, - "acc_norm_stderr": 0.017776947157528037 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044792, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044792 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.030546745264953185, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.030546745264953185 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16176470588235295, - "acc_stderr": 0.02236867256288675, - "acc_norm": 0.16176470588235295, - "acc_norm_stderr": 0.02236867256288675 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2693877551020408, - "acc_stderr": 0.02840125202902294, - "acc_norm": 0.2693877551020408, - "acc_norm_stderr": 0.02840125202902294 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658332, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658332 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2620599739243807, - "acc_stderr": 0.011231552795890394, - "acc_norm": 0.2620599739243807, - "acc_norm_stderr": 0.011231552795890394 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693264, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693264 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3151515151515151, - "acc_stderr": 0.0362773057502241, - "acc_norm": 0.3151515151515151, - "acc_norm_stderr": 
0.0362773057502241 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.3953129040998704, - "mc2_stderr": 0.015062425593708578 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.34976525821596244, - "acc_stderr": 0.016347774542860783, - "acc_norm": 0.5117370892018779, - "acc_norm_stderr": 0.017135056277338974 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "hyunseoki/ko-ref-llama2-7b", - "model_sha": "1ee08c79ae7393473754b77e82b1472ef63d5dd2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json b/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json deleted file mode 100644 index 
cc185fb3e2264d796866ebd75e891f3aa82010fc..0000000000000000000000000000000000000000 --- a/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.26706484641638223, - "acc_stderr": 0.012928933196496337, - "acc_norm": 0.3310580204778157, - "acc_norm_stderr": 0.013752062419817836 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37134037044413465, - "acc_stderr": 0.004821757734156723, - "acc_norm": 0.47470623381796456, - "acc_norm_stderr": 0.004983392650570962 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.03424042924691584, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.03424042924691584 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.22330097087378642, - "acc_stderr": 0.04123553189891431, - "acc_norm": 0.22330097087378642, - "acc_norm_stderr": 0.04123553189891431 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.0153023801235421, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.0153023801235421 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.03785714465066654, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.03785714465066654 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2, - "acc_stderr": 0.026148818018424513, - "acc_norm": 0.2, - "acc_norm_stderr": 0.026148818018424513 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.03329394119073529, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.03329394119073529 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.21864951768488747, - "acc_stderr": 0.02347558141786111, - "acc_norm": 0.21864951768488747, - "acc_norm_stderr": 0.02347558141786111 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3004484304932735, - "acc_stderr": 0.030769352008229136, - "acc_norm": 0.3004484304932735, - "acc_norm_stderr": 0.030769352008229136 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29292929292929293, - "acc_stderr": 0.03242497958178817, - "acc_norm": 0.29292929292929293, - "acc_norm_stderr": 0.03242497958178817 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.044405219061793275, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.044405219061793275 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25630252100840334, - "acc_stderr": 0.028359620870533953, - "acc_norm": 0.25630252100840334, - "acc_norm_stderr": 0.028359620870533953 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - 
"acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.039427724440366234, - "acc_norm": 0.19, - "acc_norm_stderr": 0.039427724440366234 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.30049261083743845, - "acc_stderr": 0.03225799476233483, - "acc_norm": 0.30049261083743845, - "acc_norm_stderr": 0.03225799476233483 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24193548387096775, - "acc_stderr": 0.024362599693031093, - "acc_norm": 0.24193548387096775, - "acc_norm_stderr": 0.024362599693031093 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.02934311479809445, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.02934311479809445 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2490566037735849, - "acc_stderr": 0.026616482980501704, - "acc_norm": 0.2490566037735849, - "acc_norm_stderr": 0.026616482980501704 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.025928876132766114, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.025928876132766114 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.25165562913907286, - "acc_stderr": 0.03543304234389985, - "acc_norm": 0.25165562913907286, - "acc_norm_stderr": 0.03543304234389985 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21393034825870647, - "acc_stderr": 0.028996909693328927, - "acc_norm": 0.21393034825870647, - "acc_norm_stderr": 0.028996909693328927 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2023121387283237, - "acc_stderr": 0.03063114553919882, - "acc_norm": 0.2023121387283237, - "acc_norm_stderr": 0.03063114553919882 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.021679219663693138, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.021679219663693138 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.023618678310069363, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.023618678310069363 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.02465968518596729, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.02465968518596729 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - 
"acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.21243523316062177, - "acc_stderr": 0.029519282616817244, - "acc_norm": 0.21243523316062177, - "acc_norm_stderr": 0.029519282616817244 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25688073394495414, - "acc_stderr": 0.01873249292834247, - "acc_norm": 0.25688073394495414, - "acc_norm_stderr": 0.01873249292834247 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333338, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333338 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.02495418432487991, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.02495418432487991 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.038968789850704164, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.038968789850704164 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.18421052631578946, - "acc_stderr": 0.0315469804508223, - "acc_norm": 0.18421052631578946, - "acc_norm_stderr": 0.0315469804508223 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.017479487001364764, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.017479487001364764 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953777, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953777 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.19907407407407407, - "acc_stderr": 0.027232298462690218, - "acc_norm": 0.19907407407407407, - "acc_norm_stderr": 0.027232298462690218 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2547486033519553, - "acc_stderr": 0.014572650383409162, - "acc_norm": 0.2547486033519553, - "acc_norm_stderr": 0.014572650383409162 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34191176470588236, - "acc_stderr": 0.028814722422254184, - "acc_norm": 0.34191176470588236, - "acc_norm_stderr": 0.028814722422254184 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.21224489795918366, - "acc_stderr": 0.026176967197866767, - "acc_norm": 0.21224489795918366, - "acc_norm_stderr": 0.026176967197866767 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25097783572359844, - "acc_stderr": 0.011073730299187234, - 
"acc_norm": 0.25097783572359844, - "acc_norm_stderr": 0.011073730299187234 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.031145570659486782, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.031145570659486782 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624335, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624335 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4336773026110262, - "mc2_stderr": 0.01517918566270363 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.45774647887323944, - "acc_stderr": 0.01707846824220106, - "acc_norm": 0.5469483568075117, - "acc_norm_stderr": 0.01706405474980085 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - 
"model_name": "iknow-lab/AULM-12.8b-v0", - "model_sha": "daeca40346ba44b1fbb6939cc635adf467fa6cab", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:09.json b/ingeol/ppo_test/result_2023-10-16 23:46:09.json deleted file mode 100644 index 06a7d621ee8e2399d3838fd93a43d40a0562aa9b..0000000000000000000000000000000000000000 --- a/ingeol/ppo_test/result_2023-10-16 23:46:09.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29266211604095566, - "acc_stderr": 0.013295916103619404, - "acc_norm": 0.3438566552901024, - "acc_norm_stderr": 0.013880644570156213 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39016132244572793, - "acc_stderr": 0.004867893927258242, - "acc_norm": 0.5065723959370644, - "acc_norm_stderr": 0.004989350311751651 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824564, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824564 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2515964240102171, - "acc_stderr": 0.015517322365529631, - "acc_norm": 0.2515964240102171, - "acc_norm_stderr": 0.015517322365529631 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838756, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838756 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.18072289156626506, - "acc_stderr": 0.029955737855810138, - "acc_norm": 0.18072289156626506, - "acc_norm_stderr": 0.029955737855810138 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3086816720257235, - "acc_stderr": 0.026236965881153266, - "acc_norm": 0.3086816720257235, - "acc_norm_stderr": 0.026236965881153266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.17488789237668162, - "acc_stderr": 0.025495284626444972, - "acc_norm": 0.17488789237668162, - "acc_norm_stderr": 0.025495284626444972 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.03880848301082396, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.03880848301082396 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03191178226713549, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03191178226713549 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378949, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378949 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237656, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 
0.04092563958237656 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.226890756302521, - "acc_stderr": 0.02720537153827948, - "acc_norm": 0.226890756302521, - "acc_norm_stderr": 0.02720537153827948 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.22564102564102564, - "acc_stderr": 0.02119363252514854, - "acc_norm": 0.22564102564102564, - "acc_norm_stderr": 0.02119363252514854 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2512315270935961, - "acc_stderr": 0.030516530732694436, - "acc_norm": 0.2512315270935961, - "acc_norm_stderr": 0.030516530732694436 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.27419354838709675, - "acc_stderr": 0.025378139970885203, - "acc_norm": 0.27419354838709675, - "acc_norm_stderr": 0.025378139970885203 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24358974358974358, - "acc_stderr": 0.028120966503914407, - "acc_norm": 0.24358974358974358, - "acc_norm_stderr": 0.028120966503914407 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 0.026880647889051996, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889051996 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.19090909090909092, - "acc_stderr": 0.03764425585984927, - "acc_norm": 0.19090909090909092, - "acc_norm_stderr": 0.03764425585984927 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.036030385453603826, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.036030385453603826 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2328042328042328, - "acc_stderr": 0.02176596167215452, - "acc_norm": 0.2328042328042328, - "acc_norm_stderr": 0.02176596167215452 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - 
"harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724148, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724148 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.02570264026060376, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.02570264026060376 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23316062176165803, - "acc_stderr": 0.03051611137147602, - "acc_norm": 0.23316062176165803, - "acc_norm_stderr": 0.03051611137147602 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748142, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748142 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23669724770642203, - "acc_stderr": 0.018224078117299078, - "acc_norm": 0.23669724770642203, - "acc_norm_stderr": 0.018224078117299078 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.0339549002085611, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.0339549002085611 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.024170840879341016, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.024170840879341016 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2892561983471074, - "acc_stderr": 0.041391127276354626, - "acc_norm": 0.2892561983471074, - "acc_norm_stderr": 0.041391127276354626 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.037385206761196665, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.037385206761196665 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.018120224251484577, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.018120224251484577 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.025645553622266736, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.025645553622266736 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03141554629402545, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03141554629402545 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25921787709497207, - "acc_stderr": 0.01465578083749772, - "acc_norm": 0.25921787709497207, - "acc_norm_stderr": 0.01465578083749772 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3272058823529412, - "acc_stderr": 0.028501452860396563, - "acc_norm": 0.3272058823529412, - "acc_norm_stderr": 0.028501452860396563 - }, - 
"harness|ko_mmlu_security_studies|5": { - "acc": 0.24897959183673468, - "acc_stderr": 0.027682979522960234, - "acc_norm": 0.24897959183673468, - "acc_norm_stderr": 0.027682979522960234 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658335, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658335 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2816166883963494, - "acc_stderr": 0.011487783272786696, - "acc_norm": 0.2816166883963494, - "acc_norm_stderr": 0.011487783272786696 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.03166009679399813, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.03166009679399813 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842883, - "mc2": 0.4208363898748992, - "mc2_stderr": 0.014946599322770709 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6314553990610329, - "acc_stderr": 0.016536804306154545, - "acc_norm": 0.6936619718309859, - "acc_norm_stderr": 0.0158019112867147 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 
1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "ingeol/ppo_test", - "model_sha": "af05b472a278a7bcad3de754828b78e7c284923a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:16.json b/ingeol/ppo_test/result_2023-10-16 23:46:16.json deleted file mode 100644 index 35004628d8e9ce22d20cd111a9ef44f044bd0413..0000000000000000000000000000000000000000 --- a/ingeol/ppo_test/result_2023-10-16 23:46:16.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29266211604095566, - "acc_stderr": 0.013295916103619404, - "acc_norm": 0.3438566552901024, - "acc_norm_stderr": 0.013880644570156213 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39026090420235016, - "acc_stderr": 0.004868117598481941, - "acc_norm": 0.5064728141804421, - "acc_norm_stderr": 0.00498936327695524 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824564, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824564 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.25287356321839083, - "acc_stderr": 0.015543377313719681, - "acc_norm": 0.25287356321839083, - "acc_norm_stderr": 0.015543377313719681 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.026355158413349407, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.026355158413349407 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.18072289156626506, - "acc_stderr": 0.02995573785581014, - "acc_norm": 0.18072289156626506, - "acc_norm_stderr": 0.02995573785581014 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.026664410886937613, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.026664410886937613 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.16591928251121077, - "acc_stderr": 0.024967553196547157, - "acc_norm": 0.16591928251121077, - "acc_norm_stderr": 0.024967553196547157 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.03880848301082396, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.03880848301082396 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2777777777777778, - 
"acc_stderr": 0.03191178226713549, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03191178226713549 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.038312260488503336, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.038312260488503336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.22268907563025211, - "acc_stderr": 0.027025433498882378, - "acc_norm": 0.22268907563025211, - "acc_norm_stderr": 0.027025433498882378 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.02127839386358628, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.02127839386358628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.24630541871921183, - "acc_stderr": 0.030315099285617732, - "acc_norm": 0.24630541871921183, - "acc_norm_stderr": 0.030315099285617732 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2709677419354839, - "acc_stderr": 0.025284416114900156, - "acc_norm": 0.2709677419354839, - "acc_norm_stderr": 0.025284416114900156 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23931623931623933, - "acc_stderr": 0.027951826808924333, - "acc_norm": 0.23931623931623933, - "acc_norm_stderr": 0.027951826808924333 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2641509433962264, - "acc_stderr": 0.027134291628741695, - "acc_norm": 0.2641509433962264, - "acc_norm_stderr": 0.027134291628741695 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.19090909090909092, - "acc_stderr": 0.03764425585984927, - "acc_norm": 0.19090909090909092, - "acc_norm_stderr": 0.03764425585984927 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969654, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969654 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.03345036916788991, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.03345036916788991 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184766, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184766 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 
0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2514450867052023, - "acc_stderr": 0.023357365785874037, - "acc_norm": 0.2514450867052023, - "acc_norm_stderr": 0.023357365785874037 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724148, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724148 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.02570264026060376, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.02570264026060376 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23316062176165803, - "acc_stderr": 0.03051611137147602, - "acc_norm": 0.23316062176165803, - "acc_norm_stderr": 0.03051611137147602 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23302752293577983, - "acc_stderr": 0.0181256691808615, - "acc_norm": 0.23302752293577983, - "acc_norm_stderr": 0.0181256691808615 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333338, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333338 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.02473998135511359, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.02473998135511359 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.04173349148083499, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.04173349148083499 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119667, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119667 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.025645553622266736, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.025645553622266736 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03141554629402545, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03141554629402545 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2659217877094972, - "acc_stderr": 0.014776765066438895, - "acc_norm": 0.2659217877094972, - "acc_norm_stderr": 0.014776765066438895 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, 
- "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3272058823529412, - "acc_stderr": 0.028501452860396563, - "acc_norm": 0.3272058823529412, - "acc_norm_stderr": 0.028501452860396563 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24489795918367346, - "acc_stderr": 0.027529637440174934, - "acc_norm": 0.24489795918367346, - "acc_norm_stderr": 0.027529637440174934 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27835723598435463, - "acc_stderr": 0.011446990197380982, - "acc_norm": 0.27835723598435463, - "acc_norm_stderr": 0.011446990197380982 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.03132179803083292, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.03132179803083292 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.03427743175816524, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.03427743175816524 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842883, - "mc2": 0.4208363898748992, - "mc2_stderr": 0.014946599322770709 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6314553990610329, - "acc_stderr": 0.016536804306154545, - "acc_norm": 0.6936619718309859, - "acc_norm_stderr": 0.0158019112867147 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - 
"harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "ingeol/ppo_test", - "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/ingeol/sft_merged/result_2023-10-15 17:15:13.json b/ingeol/sft_merged/result_2023-10-15 17:15:13.json deleted file mode 100644 index 568b273391df2645b477f398de5d541ff9a1306d..0000000000000000000000000000000000000000 --- a/ingeol/sft_merged/result_2023-10-15 17:15:13.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2960750853242321, - "acc_stderr": 0.013340916085246271, - "acc_norm": 0.3387372013651877, - "acc_norm_stderr": 0.01383056892797433 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39055964947221666, - "acc_stderr": 0.004868787333436579, - "acc_norm": 0.5038836885082653, - "acc_norm_stderr": 0.004989630887066195 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393161, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393161 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2515964240102171, - "acc_stderr": 0.015517322365529631, - "acc_norm": 0.2515964240102171, - "acc_norm_stderr": 0.015517322365529631 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2170212765957447, - "acc_stderr": 0.026947483121496245, - "acc_norm": 0.2170212765957447, - "acc_norm_stderr": 0.026947483121496245 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21084337349397592, - "acc_stderr": 0.0317555478662992, - "acc_norm": 0.21084337349397592, - "acc_norm_stderr": 0.0317555478662992 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3215434083601286, - "acc_stderr": 0.026527724079528872, - "acc_norm": 0.3215434083601286, - "acc_norm_stderr": 0.026527724079528872 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.15695067264573992, - "acc_stderr": 0.02441358717490739, - "acc_norm": 0.15695067264573992, - "acc_norm_stderr": 0.02441358717490739 - }, - 
"harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969174, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969174 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03191178226713549, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03191178226713549 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438015, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.03780019230438015 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237656, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237656 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868966, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868966 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.02127839386358628, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.02127839386358628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293752, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293752 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.02468597928623997, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.02468597928623997 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24786324786324787, - "acc_stderr": 0.028286324075564386, - "acc_norm": 0.24786324786324787, - "acc_norm_stderr": 0.028286324075564386 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2490566037735849, - "acc_stderr": 0.026616482980501715, - "acc_norm": 0.2490566037735849, - "acc_norm_stderr": 0.026616482980501715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360383, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360383 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.03414014007044036, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.03414014007044036 - }, - 
"harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184766, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184766 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071138, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071138 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724148, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724148 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.02570264026060376, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.02570264026060376 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24352331606217617, - "acc_stderr": 0.030975436386845426, - "acc_norm": 0.24352331606217617, - "acc_norm_stderr": 0.030975436386845426 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23486238532110093, - "acc_stderr": 0.018175110510343588, - "acc_norm": 0.23486238532110093, - "acc_norm_stderr": 0.018175110510343588 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333338, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333338 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.024170840879341016, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.024170840879341016 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.30578512396694213, - "acc_stderr": 0.04205953933884124, - "acc_norm": 0.30578512396694213, - "acc_norm_stderr": 0.04205953933884124 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03782728980865469, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03782728980865469 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.018185218954318082, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.018185218954318082 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22695035460992907, - "acc_stderr": 0.024987106365642983, - "acc_norm": 0.22695035460992907, - "acc_norm_stderr": 0.024987106365642983 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.30092592592592593, - "acc_stderr": 0.031280390843298804, - "acc_norm": 0.30092592592592593, - "acc_norm_stderr": 0.031280390843298804 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2536312849162011, - "acc_stderr": 0.014551553659369922, - "acc_norm": 0.2536312849162011, - "acc_norm_stderr": 0.014551553659369922 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33088235294117646, - "acc_stderr": 0.028582709753898445, - "acc_norm": 0.33088235294117646, - "acc_norm_stderr": 0.028582709753898445 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142783, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142783 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658335, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658335 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27053455019556716, - "acc_stderr": 0.011345996743539265, - "acc_norm": 0.27053455019556716, - "acc_norm_stderr": 0.011345996743539265 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.031493281045079556, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.031493281045079556 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.015528566637087298, - "mc2": 0.4211117529867161, - "mc2_stderr": 0.014959536407311791 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6291079812206573, - "acc_stderr": 0.016558521692487338, - "acc_norm": 0.6866197183098591, - "acc_norm_stderr": 0.01590117396348766 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - 
"harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "ingeol/sft_merged", - "model_sha": "a958e5054c1935e86f418c797825ebccb9e7fd89", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/ingeol/sft_merged/result_2023-10-15 17:37:34.json b/ingeol/sft_merged/result_2023-10-15 17:37:34.json deleted file mode 100644 index 3d66487f3a2c0eecbb8bd370f077c844f9b04896..0000000000000000000000000000000000000000 --- a/ingeol/sft_merged/result_2023-10-15 17:37:34.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2977815699658703, - "acc_stderr": 0.01336308010724449, - "acc_norm": 0.3395904436860068, - "acc_norm_stderr": 0.013839039762820167 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39055964947221666, - "acc_stderr": 0.004868787333436579, - "acc_norm": 0.5038836885082653, - "acc_norm_stderr": 0.004989630887066195 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393161, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393161 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2515964240102171, - "acc_stderr": 0.015517322365529631, - "acc_norm": 0.2515964240102171, - "acc_norm_stderr": 0.015517322365529631 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2170212765957447, - "acc_stderr": 0.026947483121496245, - "acc_norm": 0.2170212765957447, - "acc_norm_stderr": 0.026947483121496245 - }, - 
"harness|ko_mmlu_virology|5": { - "acc": 0.21084337349397592, - "acc_stderr": 0.0317555478662992, - "acc_norm": 0.21084337349397592, - "acc_norm_stderr": 0.0317555478662992 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3215434083601286, - "acc_stderr": 0.026527724079528872, - "acc_norm": 0.3215434083601286, - "acc_norm_stderr": 0.026527724079528872 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.15695067264573992, - "acc_stderr": 0.02441358717490739, - "acc_norm": 0.15695067264573992, - "acc_norm_stderr": 0.02441358717490739 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969174, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969174 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03191178226713549, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03191178226713549 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438015, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.03780019230438015 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237656, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237656 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868966, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868966 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.02127839386358628, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.02127839386358628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293752, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293752 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.02468597928623997, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.02468597928623997 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24786324786324787, - "acc_stderr": 0.028286324075564386, - "acc_norm": 0.24786324786324787, - "acc_norm_stderr": 0.028286324075564386 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2490566037735849, - "acc_stderr": 0.026616482980501715, - "acc_norm": 0.2490566037735849, - "acc_norm_stderr": 0.026616482980501715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - 
"harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360383, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360383 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047873, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047873 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184766, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184766 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071138, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071138 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.03623089915724148, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.03623089915724148 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.02570264026060376, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.02570264026060376 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.24352331606217617, - "acc_stderr": 0.030975436386845426, - "acc_norm": 0.24352331606217617, - "acc_norm_stderr": 0.030975436386845426 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23486238532110093, - "acc_stderr": 0.018175110510343588, - "acc_norm": 0.23486238532110093, - "acc_norm_stderr": 0.018175110510343588 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03333333333333338, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03333333333333338 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.024170840879341016, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.024170840879341016 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.30578512396694213, - "acc_stderr": 0.04205953933884124, - "acc_norm": 0.30578512396694213, - "acc_norm_stderr": 0.04205953933884124 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03782728980865469, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03782728980865469 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 
0.28104575163398693, - "acc_stderr": 0.018185218954318082, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.018185218954318082 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432407, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432407 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.30092592592592593, - "acc_stderr": 0.031280390843298804, - "acc_norm": 0.30092592592592593, - "acc_norm_stderr": 0.031280390843298804 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2536312849162011, - "acc_stderr": 0.014551553659369922, - "acc_norm": 0.2536312849162011, - "acc_norm_stderr": 0.014551553659369922 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33088235294117646, - "acc_stderr": 0.028582709753898445, - "acc_norm": 0.33088235294117646, - "acc_norm_stderr": 0.028582709753898445 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142783, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142783 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658335, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658335 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27053455019556716, - "acc_stderr": 0.011345996743539265, - "acc_norm": 0.27053455019556716, - "acc_norm_stderr": 0.011345996743539265 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.031493281045079556, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.031493281045079556 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.015528566637087298, - "mc2": 0.4211117529867161, - "mc2_stderr": 0.014959536407311791 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6291079812206573, - "acc_stderr": 0.016558521692487338, - "acc_norm": 0.6866197183098591, - "acc_norm_stderr": 0.01590117396348766 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "ingeol/sft_merged", - "model_sha": "a958e5054c1935e86f418c797825ebccb9e7fd89", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json b/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json deleted file mode 100644 index fd714125554a46580e2d4abb0152d807555cb088..0000000000000000000000000000000000000000 --- a/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2858361774744027, - "acc_stderr": 0.013203196088537364, - "acc_norm": 0.33532423208191126, - "acc_norm_stderr": 0.013796182947785564 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3910575582553276, - "acc_stderr": 0.00486989929773455, - "acc_norm": 0.5030870344552878, - "acc_norm_stderr": 0.004989686307484551 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.03401052620104088, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.03401052620104088 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23371647509578544, - "acc_stderr": 0.015133383278988832, - "acc_norm": 0.23371647509578544, - "acc_norm_stderr": 0.015133383278988832 - }, - 
"harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.039725528847851375, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.039725528847851375 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.02818544130123409, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.02818544130123409 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3215434083601286, - "acc_stderr": 0.026527724079528872, - "acc_norm": 0.3215434083601286, - "acc_norm_stderr": 0.026527724079528872 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.15695067264573992, - "acc_stderr": 0.024413587174907405, - "acc_norm": 0.15695067264573992, - "acc_norm_stderr": 0.024413587174907405 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29797979797979796, - "acc_stderr": 0.03258630383836555, - "acc_norm": 0.29797979797979796, - "acc_norm_stderr": 0.03258630383836555 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.037800192304380156, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.037800192304380156 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171452, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171452 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.24102564102564103, - "acc_stderr": 0.021685546665333188, - "acc_norm": 0.24102564102564103, - "acc_norm_stderr": 0.021685546665333188 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243839, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243839 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 0.03127090713297698, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.03127090713297698 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239973, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239973 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24786324786324787, - "acc_stderr": 0.028286324075564393, - "acc_norm": 0.24786324786324787, - "acc_norm_stderr": 0.028286324075564393 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 
0.2528301886792453, - "acc_stderr": 0.02674989977124124, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.02674989977124124 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.038950910157241364, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.038950910157241364 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23383084577114427, - "acc_stderr": 0.029929415408348384, - "acc_norm": 0.23383084577114427, - "acc_norm_stderr": 0.029929415408348384 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047873, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047873 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2275132275132275, - "acc_stderr": 0.021591269407823778, - "acc_norm": 0.2275132275132275, - "acc_norm_stderr": 0.021591269407823778 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071145, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071145 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.02584224870090218, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.02584224870090218 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23316062176165803, - "acc_stderr": 0.03051611137147601, - "acc_norm": 0.23316062176165803, - "acc_norm_stderr": 0.03051611137147601 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26788990825688075, - "acc_stderr": 0.018987462257978652, - "acc_norm": 0.26788990825688075, - "acc_norm_stderr": 0.018987462257978652 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03718489006818115, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03718489006818115 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.023805186524888142, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.023805186524888142 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - 
"acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.04173349148083499, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.04173349148083499 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19736842105263158, - "acc_stderr": 0.03238981601699397, - "acc_norm": 0.19736842105263158, - "acc_norm_stderr": 0.03238981601699397 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.017630827375148383, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.017630827375148383 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.025518731049537766, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.025518731049537766 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.03952301967702511, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.03952301967702511 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095278, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095278 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.40441176470588236, - "acc_stderr": 0.02981263070156974, - "acc_norm": 0.40441176470588236, - "acc_norm_stderr": 0.02981263070156974 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.02812342933514278, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.02812342933514278 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2692307692307692, - "acc_stderr": 0.011328734403140332, - "acc_norm": 0.2692307692307692, - "acc_norm_stderr": 0.011328734403140332 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.031493281045079556, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.031493281045079556 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.03317505930009179, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.03317505930009179 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2802937576499388, - "mc1_stderr": 0.015723139524608753, - "mc2": 0.42256277632208605, - "mc2_stderr": 0.014988663316140667 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6384976525821596, - "acc_stderr": 0.016469121490430085, - "acc_norm": 0.6948356807511737, - "acc_norm_stderr": 0.015784947890737814 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - 
"harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "ingeol/sft_merged_660", - "model_sha": "2426d1b6f2940a808b68c578e0fafdab1a515707", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json b/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json deleted file mode 100644 index 7e873b21bb3824c78d339684bc0e73333f79ae03..0000000000000000000000000000000000000000 --- a/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2226962457337884, - "acc_stderr": 0.012158314774829948, - "acc_norm": 0.2627986348122867, - "acc_norm_stderr": 0.012862523175351331 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2726548496315475, - "acc_stderr": 0.004444146875436292, - "acc_norm": 0.29635530770762797, - "acc_norm_stderr": 0.004557163175885563 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - 
"acc_stderr": 0.03508771929824561, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824561 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161549, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161549 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2848020434227331, - "acc_stderr": 0.016139174096522553, - "acc_norm": 0.2848020434227331, - "acc_norm_stderr": 0.016139174096522553 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.03785714465066654, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.03785714465066654 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843231008, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843231008 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511115, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 0.03529486801511115 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3440514469453376, - "acc_stderr": 0.026981478043648026, - "acc_norm": 0.3440514469453376, - "acc_norm_stderr": 0.026981478043648026 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34977578475336324, - "acc_stderr": 0.03200736719484503, - "acc_norm": 0.34977578475336324, - "acc_norm_stderr": 0.03200736719484503 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.037276735755969195, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.037276735755969195 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.029857515673386417, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.029857515673386417 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386215, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386215 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307811, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307811 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2689075630252101, - "acc_stderr": 0.028801392193631276, - "acc_norm": 0.2689075630252101, - "acc_norm_stderr": 0.028801392193631276 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2512820512820513, - "acc_stderr": 0.021992016662370568, - "acc_norm": 0.2512820512820513, - "acc_norm_stderr": 0.021992016662370568 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04643454608906275, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04643454608906275 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.030903796952114454, - "acc_norm": 0.26108374384236455, - 
"acc_norm_stderr": 0.030903796952114454 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2709677419354839, - "acc_stderr": 0.02528441611490016, - "acc_norm": 0.2709677419354839, - "acc_norm_stderr": 0.02528441611490016 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3418803418803419, - "acc_stderr": 0.03107502852650775, - "acc_norm": 0.3418803418803419, - "acc_norm_stderr": 0.03107502852650775 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2943396226415094, - "acc_stderr": 0.028049186315695248, - "acc_norm": 0.2943396226415094, - "acc_norm_stderr": 0.028049186315695248 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.044612721759105065, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.044612721759105065 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.027840811495871927, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.027840811495871927 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.38308457711442784, - "acc_stderr": 0.034375193373382504, - "acc_norm": 0.38308457711442784, - "acc_norm_stderr": 0.034375193373382504 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2724867724867725, - "acc_stderr": 0.022930973071633345, - "acc_norm": 0.2724867724867725, - "acc_norm_stderr": 0.022930973071633345 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.315028901734104, - "acc_stderr": 0.025009313790069692, - "acc_norm": 0.315028901734104, - "acc_norm_stderr": 0.025009313790069692 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.02570264026060375, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.02570264026060375 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3160621761658031, - "acc_stderr": 0.03355397369686173, - "acc_norm": 0.3160621761658031, - "acc_norm_stderr": 0.03355397369686173 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23853211009174313, - "acc_stderr": 0.01827257581023186, - "acc_norm": 0.23853211009174313, - "acc_norm_stderr": 0.01827257581023186 - }, - 
"harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2908496732026144, - "acc_stderr": 0.026004800363952113, - "acc_norm": 0.2908496732026144, - "acc_norm_stderr": 0.026004800363952113 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.512396694214876, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.512396694214876, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2236842105263158, - "acc_stderr": 0.03391160934343604, - "acc_norm": 0.2236842105263158, - "acc_norm_stderr": 0.03391160934343604 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.017952449196987866, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.017952449196987866 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880585, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880585 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.043270409325787296, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.043270409325787296 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.19907407407407407, - "acc_stderr": 0.027232298462690218, - "acc_norm": 0.19907407407407407, - "acc_norm_stderr": 0.027232298462690218 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.02518778666022727, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.02518778666022727 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.20408163265306123, - "acc_stderr": 0.025801283475090496, - "acc_norm": 0.20408163265306123, - "acc_norm_stderr": 0.025801283475090496 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3037974683544304, - "acc_stderr": 0.029936696387138598, - "acc_norm": 0.3037974683544304, - "acc_norm_stderr": 0.029936696387138598 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24837027379400262, - "acc_stderr": 0.011035212598034494, - "acc_norm": 0.24837027379400262, - "acc_norm_stderr": 0.011035212598034494 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591362, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591362 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2594859241126071, - "mc1_stderr": 0.015345409485557966, - "mc2": 0.43443146146429873, - "mc2_stderr": 0.01580310882533787 - }, 
- "harness|ko_commongen_v2|2": { - "acc": 0.22183098591549297, - "acc_stderr": 0.014242403694199452, - "acc_norm": 0.3626760563380282, - "acc_norm_stderr": 0.016480666823965092 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jb723/llama2-ko-7B-model", - "model_sha": "03d23910fa0f9b0542ce7634cbcd36983321f55a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json deleted file mode 100644 index 6689365419cbfc9417e571de817e80a7934c3a00..0000000000000000000000000000000000000000 --- a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 
13:07:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3148464163822526, - "acc_stderr": 0.01357265770308495, - "acc_norm": 0.37542662116040953, - "acc_norm_stderr": 0.014150631435111726 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3571997610037841, - "acc_stderr": 0.004781950883460504, - "acc_norm": 0.4569806811392153, - "acc_norm_stderr": 0.004971278309204196 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602967, - "acc_norm": 0.45614035087719296, - "acc_norm_stderr": 0.03820042586602967 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.6310679611650486, - "acc_stderr": 0.0477761518115674, - "acc_norm": 0.6310679611650486, - "acc_norm_stderr": 0.0477761518115674 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4342273307790549, - "acc_stderr": 0.017724589389677785, - "acc_norm": 0.4342273307790549, - "acc_norm_stderr": 0.017724589389677785 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610334, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610334 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.43373493975903615, - "acc_stderr": 0.03858158940685515, - "acc_norm": 0.43373493975903615, - "acc_norm_stderr": 0.03858158940685515 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.02834504586484068, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.02834504586484068 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755292, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755292 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4122137404580153, - "acc_stderr": 0.04317171194870254, - "acc_norm": 0.4122137404580153, - "acc_norm_stderr": 0.04317171194870254 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.47474747474747475, - "acc_stderr": 0.035578062450873145, - "acc_norm": 0.47474747474747475, - "acc_norm_stderr": 0.035578062450873145 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.47586206896551725, - "acc_stderr": 0.041618085035015295, - "acc_norm": 0.47586206896551725, - "acc_norm_stderr": 0.041618085035015295 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.046550104113196177, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.046550104113196177 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4831932773109244, - "acc_stderr": 0.03246013680375308, - "acc_norm": 0.4831932773109244, - "acc_norm_stderr": 0.03246013680375308 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4307692307692308, - "acc_stderr": 0.025106820660539746, - "acc_norm": 0.4307692307692308, - "acc_norm_stderr": 0.025106820660539746 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.58, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.58, - "acc_norm_stderr": 
0.049604496374885836 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5462962962962963, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.5462962962962963, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4187192118226601, - "acc_stderr": 0.03471192860518468, - "acc_norm": 0.4187192118226601, - "acc_norm_stderr": 0.03471192860518468 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45161290322580644, - "acc_stderr": 0.02831050034856839, - "acc_norm": 0.45161290322580644, - "acc_norm_stderr": 0.02831050034856839 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6752136752136753, - "acc_stderr": 0.03067902276549883, - "acc_norm": 0.6752136752136753, - "acc_norm_stderr": 0.03067902276549883 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.41132075471698115, - "acc_stderr": 0.030285009259009805, - "acc_norm": 0.41132075471698115, - "acc_norm_stderr": 0.030285009259009805 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.028406533090608463, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.028406533090608463 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5771144278606966, - "acc_stderr": 0.034932317774212816, - "acc_norm": 0.5771144278606966, - "acc_norm_stderr": 0.034932317774212816 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.037038511930995215, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.037038511930995215 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36243386243386244, - "acc_stderr": 0.02475747390275205, - "acc_norm": 0.36243386243386244, - "acc_norm_stderr": 0.02475747390275205 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4797687861271676, - "acc_stderr": 0.026897049996382875, - "acc_norm": 0.4797687861271676, - "acc_norm_stderr": 0.026897049996382875 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4785276073619632, - "acc_stderr": 0.03924746876751129, - "acc_norm": 0.4785276073619632, - "acc_norm_stderr": 0.03924746876751129 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.027607914087400473, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.027607914087400473 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720685, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720685 - }, - 
"harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384488, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384488 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47889908256880737, - "acc_stderr": 0.021418224754264643, - "acc_norm": 0.47889908256880737, - "acc_norm_stderr": 0.021418224754264643 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795132, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795132 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45751633986928103, - "acc_stderr": 0.028526383452142624, - "acc_norm": 0.45751633986928103, - "acc_norm_stderr": 0.028526383452142624 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6198347107438017, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.6198347107438017, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4473684210526316, - "acc_stderr": 0.040463368839782514, - "acc_norm": 0.4473684210526316, - "acc_norm_stderr": 0.040463368839782514 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.01943177567703731, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.01943177567703731 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32978723404255317, - "acc_stderr": 0.028045946942042405, - "acc_norm": 0.32978723404255317, - "acc_norm_stderr": 0.028045946942042405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4212962962962963, - "acc_stderr": 0.03367462138896078, - "acc_norm": 0.4212962962962963, - "acc_norm_stderr": 0.03367462138896078 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.33631284916201115, - "acc_stderr": 0.015801003729145908, - "acc_norm": 0.33631284916201115, - "acc_norm_stderr": 0.015801003729145908 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.39338235294117646, - "acc_stderr": 0.02967428828131118, - "acc_norm": 0.39338235294117646, - "acc_norm_stderr": 0.02967428828131118 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5061224489795918, - "acc_stderr": 0.03200682020163906, - "acc_norm": 0.5061224489795918, - "acc_norm_stderr": 0.03200682020163906 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5358649789029536, - "acc_stderr": 0.03246338898055659, - "acc_norm": 0.5358649789029536, - "acc_norm_stderr": 0.03246338898055659 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31877444589308995, - "acc_stderr": 0.011901895635786084, - "acc_norm": 0.31877444589308995, - "acc_norm_stderr": 
0.011901895635786084 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45588235294117646, - "acc_stderr": 0.03495624522015474, - "acc_norm": 0.45588235294117646, - "acc_norm_stderr": 0.03495624522015474 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03888176921674099, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03888176921674099 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.30354957160342716, - "mc1_stderr": 0.016095884155386854, - "mc2": 0.4745826617149022, - "mc2_stderr": 0.015464604846827046 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1983568075117371, - "acc_stderr": 0.013669396132574585, - "acc_norm": 0.23591549295774647, - "acc_norm_stderr": 0.014554059570736372 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": 
"jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05", - "model_sha": "fb04a8d5574256eefe4faa1783874384c88eea9b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json b/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json deleted file mode 100644 index 9cb3be2e51cd4d4bac6ba2d389d8de4f80369273..0000000000000000000000000000000000000000 --- a/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3199658703071672, - "acc_stderr": 0.013631345807016196, - "acc_norm": 0.38310580204778155, - "acc_norm_stderr": 0.014206472661672876 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36875124477195775, - "acc_stderr": 0.004814803098436803, - "acc_norm": 0.4794861581358295, - "acc_norm_stderr": 0.0049855800659464565 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602967, - "acc_norm": 0.45614035087719296, - "acc_norm_stderr": 0.03820042586602967 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4563106796116505, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.4563106796116505, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.454661558109834, - "acc_stderr": 0.017806304585052602, - "acc_norm": 0.454661558109834, - "acc_norm_stderr": 0.017806304585052602 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.039992628766177214, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.039992628766177214 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3829787234042553, - "acc_stderr": 0.03177821250236922, - "acc_norm": 0.3829787234042553, - "acc_norm_stderr": 0.03177821250236922 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4565916398713826, - "acc_stderr": 0.028290869054197604, - "acc_norm": 0.4565916398713826, - "acc_norm_stderr": 0.028290869054197604 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3901345291479821, - "acc_stderr": 0.032737667254591575, - "acc_norm": 0.3901345291479821, - "acc_norm_stderr": 0.032737667254591575 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.04374928560599738, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.04374928560599738 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621503, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621503 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4696969696969697, - "acc_stderr": 0.03555804051763929, - "acc_norm": 0.4696969696969697, - "acc_norm_stderr": 0.03555804051763929 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.0438986995680878, - 
"acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.0438986995680878 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4369747899159664, - "acc_stderr": 0.032219436365661956, - "acc_norm": 0.4369747899159664, - "acc_norm_stderr": 0.032219436365661956 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.37435897435897436, - "acc_stderr": 0.02453759157283053, - "acc_norm": 0.37435897435897436, - "acc_norm_stderr": 0.02453759157283053 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3842364532019704, - "acc_stderr": 0.0342239856565755, - "acc_norm": 0.3842364532019704, - "acc_norm_stderr": 0.0342239856565755 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4290322580645161, - "acc_stderr": 0.02815603653823321, - "acc_norm": 0.4290322580645161, - "acc_norm_stderr": 0.02815603653823321 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5769230769230769, - "acc_stderr": 0.032366121762202014, - "acc_norm": 0.5769230769230769, - "acc_norm_stderr": 0.032366121762202014 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4226415094339623, - "acc_stderr": 0.03040233144576954, - "acc_norm": 0.4226415094339623, - "acc_norm_stderr": 0.03040233144576954 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.027840811495871923, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.027840811495871923 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.03802039760107903, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.03802039760107903 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5472636815920398, - "acc_stderr": 0.03519702717576915, - "acc_norm": 0.5472636815920398, - "acc_norm_stderr": 0.03519702717576915 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3468208092485549, - "acc_stderr": 0.036291466701596636, - "acc_norm": 0.3468208092485549, - "acc_norm_stderr": 0.036291466701596636 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3915343915343915, - "acc_stderr": 0.025138091388851102, - "acc_norm": 0.3915343915343915, - "acc_norm_stderr": 0.025138091388851102 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.430635838150289, - "acc_stderr": 0.02665880027367238, - "acc_norm": 0.430635838150289, - "acc_norm_stderr": 
0.02665880027367238 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.03856672163548913, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.03856672163548913 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4506172839506173, - "acc_stderr": 0.027684721415656203, - "acc_norm": 0.4506172839506173, - "acc_norm_stderr": 0.027684721415656203 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.39378238341968913, - "acc_stderr": 0.03526077095548237, - "acc_norm": 0.39378238341968913, - "acc_norm_stderr": 0.03526077095548237 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436695, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436695 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.42385321100917434, - "acc_stderr": 0.02118726320908752, - "acc_norm": 0.42385321100917434, - "acc_norm_stderr": 0.02118726320908752 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.02849199358617157, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.02849199358617157 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.045077322787750874, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.045077322787750874 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777471, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777471 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3284313725490196, - "acc_stderr": 0.018999707383162662, - "acc_norm": 0.3284313725490196, - "acc_norm_stderr": 0.018999707383162662 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.34397163120567376, - "acc_stderr": 0.028338017428611313, - "acc_norm": 0.34397163120567376, - "acc_norm_stderr": 0.028338017428611313 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.36574074074074076, - "acc_stderr": 0.03284738857647207, - "acc_norm": 0.36574074074074076, - "acc_norm_stderr": 0.03284738857647207 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.59, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.59, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3713235294117647, - "acc_stderr": 0.02934980313976587, - "acc_norm": 0.3713235294117647, - "acc_norm_stderr": 0.02934980313976587 - }, - "harness|ko_mmlu_security_studies|5": { - 
"acc": 0.4122448979591837, - "acc_stderr": 0.03151236044674281, - "acc_norm": 0.4122448979591837, - "acc_norm_stderr": 0.03151236044674281 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5147679324894515, - "acc_stderr": 0.032533028078777386, - "acc_norm": 0.5147679324894515, - "acc_norm_stderr": 0.032533028078777386 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29465449804432853, - "acc_stderr": 0.011643576764069548, - "acc_norm": 0.29465449804432853, - "acc_norm_stderr": 0.011643576764069548 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3515151515151515, - "acc_stderr": 0.0372820699868265, - "acc_norm": 0.3515151515151515, - "acc_norm_stderr": 0.0372820699868265 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476199, - "mc2": 0.4322824441345256, - "mc2_stderr": 0.015763267859642997 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.13615023474178403, - "acc_stderr": 0.011756106912219616, - "acc_norm": 0.17488262910798122, - "acc_norm_stderr": 0.013021662108610223 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - 
"harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jin05102518/Astral-7B-Instruct-v0.01", - "model_sha": "095682dac7dc303e13f3c4135333e5c78db5afbf", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json b/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json deleted file mode 100644 index c6351ad980e2884d8097de9488cf2a75e23ab60f..0000000000000000000000000000000000000000 --- a/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.40017064846416384, - "acc_stderr": 0.014317197787809167, - "acc_norm": 0.4539249146757679, - "acc_norm_stderr": 0.014549221105171864 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4224258115913165, - "acc_stderr": 0.004929361040558258, - "acc_norm": 0.5571599283011353, - "acc_norm_stderr": 0.004957068377516512 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5087719298245614, - "acc_stderr": 0.038342347441649924, - "acc_norm": 0.5087719298245614, - "acc_norm_stderr": 0.038342347441649924 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5048543689320388, - "acc_stderr": 0.04950504382128921, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.04950504382128921 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49169859514687103, - "acc_stderr": 0.017877498991072008, - "acc_norm": 0.49169859514687103, - "acc_norm_stderr": 0.017877498991072008 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.04256193767901407, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.04256193767901407 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.03115852213135778, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.03115852213135778 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42168674698795183, - "acc_stderr": 0.038444531817709175, - "acc_norm": 0.42168674698795183, - "acc_norm_stderr": 0.038444531817709175 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4662379421221865, - "acc_stderr": 0.02833327710956278, - "acc_norm": 0.4662379421221865, - "acc_norm_stderr": 0.02833327710956278 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008732, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008732 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4961832061068702, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.4961832061068702, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - 
"harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5707070707070707, - "acc_stderr": 0.035265527246011986, - "acc_norm": 0.5707070707070707, - "acc_norm_stderr": 0.035265527246011986 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.42758620689655175, - "acc_stderr": 0.041227371113703316, - "acc_norm": 0.42758620689655175, - "acc_norm_stderr": 0.041227371113703316 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171452, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171452 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.03210479051015776, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.03210479051015776 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.39487179487179486, - "acc_stderr": 0.02478431694215636, - "acc_norm": 0.39487179487179486, - "acc_norm_stderr": 0.02478431694215636 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437056, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437056 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.034139638059062345, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.034139638059062345 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4258064516129032, - "acc_stderr": 0.028129112709165894, - "acc_norm": 0.4258064516129032, - "acc_norm_stderr": 0.028129112709165894 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6538461538461539, - "acc_stderr": 0.031166957367235903, - "acc_norm": 0.6538461538461539, - "acc_norm_stderr": 0.031166957367235903 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.46037735849056605, - "acc_stderr": 0.03067609659938918, - "acc_norm": 0.46037735849056605, - "acc_norm_stderr": 0.03067609659938918 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6019900497512438, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.6019900497512438, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.03692820767264867, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.03692820767264867 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.328042328042328, - "acc_stderr": 0.024180497164376882, - "acc_norm": 0.328042328042328, - "acc_norm_stderr": 0.024180497164376882 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 
0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237101, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237101 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4421965317919075, - "acc_stderr": 0.026738603643807403, - "acc_norm": 0.4421965317919075, - "acc_norm_stderr": 0.026738603643807403 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.039015918258361836, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.039015918258361836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4506172839506173, - "acc_stderr": 0.027684721415656203, - "acc_norm": 0.4506172839506173, - "acc_norm_stderr": 0.027684721415656203 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.47668393782383417, - "acc_stderr": 0.03604513672442207, - "acc_norm": 0.47668393782383417, - "acc_norm_stderr": 0.03604513672442207 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48623853211009177, - "acc_stderr": 0.02142920208987408, - "acc_norm": 0.48623853211009177, - "acc_norm_stderr": 0.02142920208987408 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.028431095444176643, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.028431095444176643 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4473684210526316, - "acc_stderr": 0.040463368839782514, - "acc_norm": 0.4473684210526316, - "acc_norm_stderr": 0.040463368839782514 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32679738562091504, - "acc_stderr": 0.018975427920507205, - "acc_norm": 0.32679738562091504, - "acc_norm_stderr": 0.018975427920507205 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028121636040639875, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028121636040639875 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.038342410214190735, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.038342410214190735 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.30092592592592593, - "acc_stderr": 0.031280390843298825, - "acc_norm": 0.30092592592592593, - "acc_norm_stderr": 0.031280390843298825 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - 
"harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.026799562024887688, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.026799562024887688 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.49387755102040815, - "acc_stderr": 0.032006820201639086, - "acc_norm": 0.49387755102040815, - "acc_norm_stderr": 0.032006820201639086 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5316455696202531, - "acc_stderr": 0.032481974005110756, - "acc_norm": 0.5316455696202531, - "acc_norm_stderr": 0.032481974005110756 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3089960886571056, - "acc_stderr": 0.011801729777239249, - "acc_norm": 0.3089960886571056, - "acc_norm_stderr": 0.011801729777239249 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.03454236585380609, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.03454236585380609 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.49696969696969695, - "acc_stderr": 0.03904272341431857, - "acc_norm": 0.49696969696969695, - "acc_norm_stderr": 0.03904272341431857 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29498164014687883, - "mc1_stderr": 0.015964400965589674, - "mc2": 0.4713625301918517, - "mc2_stderr": 0.015403994277020416 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3532863849765258, - "acc_stderr": 0.016385310378526204, - "acc_norm": 0.41784037558685444, - "acc_norm_stderr": 0.016906801839282722 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/ko-llama2-13b-v4", - "model_sha": "a3773012adb9e13b9bd9b15634dfaeb18718c24d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json b/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json deleted file mode 100644 index 5b0369db541e330698450e8b477539be8363e50b..0000000000000000000000000000000000000000 --- a/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.40273037542662116, - "acc_stderr": 0.014332236306790154, - "acc_norm": 0.45051194539249145, - "acc_norm_stderr": 0.014539646098471627 - }, - "harness|ko_hellaswag|10": { - "acc": 0.42123083051185023, - "acc_stderr": 0.004927473370720142, - "acc_norm": 0.5584544911372237, - "acc_norm_stderr": 0.004955564650016176 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5321637426900585, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.5321637426900585, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5048543689320388, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.017879948914431665, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.017879948914431665 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.042446332383532286, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.042446332383532286 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.03124532520276193, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.03124532520276193 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.43373493975903615, - "acc_stderr": 0.03858158940685515, - "acc_norm": 0.43373493975903615, - "acc_norm_stderr": 0.03858158940685515 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.47266881028938906, - "acc_stderr": 0.028355633568328188, - "acc_norm": 0.47266881028938906, - "acc_norm_stderr": 
0.028355633568328188 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.03259625118416828, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.03259625118416828 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4961832061068702, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.4961832061068702, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5656565656565656, - "acc_stderr": 0.035315058793591834, - "acc_norm": 0.5656565656565656, - "acc_norm_stderr": 0.035315058793591834 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.41596638655462187, - "acc_stderr": 0.03201650100739615, - "acc_norm": 0.41596638655462187, - "acc_norm_stderr": 0.03201650100739615 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3923076923076923, - "acc_stderr": 0.02475600038213094, - "acc_norm": 0.3923076923076923, - "acc_norm_stderr": 0.02475600038213094 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.034139638059062345, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.034139638059062345 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.028272410186214906, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.028272410186214906 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.031075028526507748, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.031075028526507748 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.46037735849056605, - "acc_stderr": 0.030676096599389184, - "acc_norm": 0.46037735849056605, - "acc_norm_stderr": 0.030676096599389184 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0478833976870286, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0478833976870286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371216, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371216 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5970149253731343, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.5970149253731343, - "acc_norm_stderr": 0.034683432951111266 - 
}, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.03724249595817731, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.03724249595817731 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.328042328042328, - "acc_stderr": 0.02418049716437689, - "acc_norm": 0.328042328042328, - "acc_norm_stderr": 0.02418049716437689 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.03962135573486219, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.03962135573486219 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237101, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237101 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4479768786127168, - "acc_stderr": 0.02677299065336183, - "acc_norm": 0.4479768786127168, - "acc_norm_stderr": 0.02677299065336183 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.038890666191127216, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.038890666191127216 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.027744313443376536, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.027744313443376536 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.45595854922279794, - "acc_stderr": 0.03594413711272436, - "acc_norm": 0.45595854922279794, - "acc_norm_stderr": 0.03594413711272436 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48256880733944957, - "acc_stderr": 0.02142429187185315, - "acc_norm": 0.48256880733944957, - "acc_norm_stderr": 0.02142429187185315 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.041049472699033945, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.041049472699033945 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.028358956313423552, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.028358956313423552 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.019023726160724553, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.019023726160724553 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028121636040639872, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028121636040639872 - }, - "harness|ko_mmlu_machine_learning|5": { - 
"acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.30092592592592593, - "acc_stderr": 0.03128039084329882, - "acc_norm": 0.30092592592592593, - "acc_norm_stderr": 0.03128039084329882 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.28308823529411764, - "acc_stderr": 0.0273658611315138, - "acc_norm": 0.28308823529411764, - "acc_norm_stderr": 0.0273658611315138 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.49795918367346936, - "acc_stderr": 0.0320089533497105, - "acc_norm": 0.49795918367346936, - "acc_norm_stderr": 0.0320089533497105 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5189873417721519, - "acc_stderr": 0.03252375148090447, - "acc_norm": 0.5189873417721519, - "acc_norm_stderr": 0.03252375148090447 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3116036505867014, - "acc_stderr": 0.011829039182849645, - "acc_norm": 0.3116036505867014, - "acc_norm_stderr": 0.011829039182849645 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4068627450980392, - "acc_stderr": 0.03447891136353382, - "acc_norm": 0.4068627450980392, - "acc_norm_stderr": 0.03447891136353382 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.03902551007374449, - "acc_norm": 0.48484848484848486, - "acc_norm_stderr": 0.03902551007374449 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384304, - "mc2": 0.4663029303420103, - "mc2_stderr": 0.015238838193243256 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4072769953051643, - "acc_stderr": 0.01684248043954552, - "acc_norm": 0.4753521126760563, - "acc_norm_stderr": 0.017118941126722693 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - 
"harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/ko-llama2-13b-v5", - "model_sha": "28f20014bc519440b6c16a65adf6545c1c9687b6", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json b/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json deleted file mode 100644 index e8d63ee84485331018653642f052afcb787c16ab..0000000000000000000000000000000000000000 --- a/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.39419795221843, - "acc_stderr": 0.01428052266746733, - "acc_norm": 0.454778156996587, - "acc_norm_stderr": 0.014551507060836355 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4217287392949612, - "acc_stderr": 0.004928263494616727, - "acc_norm": 0.5544712208723361, - "acc_norm_stderr": 0.004960082528852438 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.543859649122807, - "acc_stderr": 0.03820042586602967, - "acc_norm": 0.543859649122807, - "acc_norm_stderr": 0.03820042586602967 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.017879948914431662, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.017879948914431662 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.042446332383532286, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.042446332383532286 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - 
"acc": 0.3659574468085106, - "acc_stderr": 0.0314895582974553, - "acc_norm": 0.3659574468085106, - "acc_norm_stderr": 0.0314895582974553 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42771084337349397, - "acc_stderr": 0.03851597683718533, - "acc_norm": 0.42771084337349397, - "acc_norm_stderr": 0.03851597683718533 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4630225080385852, - "acc_stderr": 0.028320325830105915, - "acc_norm": 0.4630225080385852, - "acc_norm_stderr": 0.028320325830105915 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.35874439461883406, - "acc_stderr": 0.03219079200419995, - "acc_norm": 0.35874439461883406, - "acc_norm_stderr": 0.03219079200419995 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5038167938931297, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.5038167938931297, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5909090909090909, - "acc_stderr": 0.03502975799413007, - "acc_norm": 0.5909090909090909, - "acc_norm_stderr": 0.03502975799413007 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.040937939812662374, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.040937939812662374 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.042801058373643966, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.042801058373643966 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.03210479051015776, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.03210479051015776 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.39487179487179486, - "acc_stderr": 0.02478431694215636, - "acc_norm": 0.39487179487179486, - "acc_norm_stderr": 0.02478431694215636 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190193, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190193 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3399014778325123, - "acc_stderr": 0.033327690684107895, - "acc_norm": 0.3399014778325123, - "acc_norm_stderr": 0.033327690684107895 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.432258064516129, - "acc_stderr": 0.028181739720019416, - "acc_norm": 0.432258064516129, - "acc_norm_stderr": 0.028181739720019416 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.031075028526507748, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.031075028526507748 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4716981132075472, - "acc_stderr": 0.030723535249006107, - "acc_norm": 0.4716981132075472, - "acc_norm_stderr": 0.030723535249006107 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2518518518518518, 
- "acc_stderr": 0.02646611753895992, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.02646611753895992 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119995, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119995 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5970149253731343, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.5970149253731343, - "acc_norm_stderr": 0.034683432951111266 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.0365634365335316, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.0365634365335316 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.023809523809523864, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.023809523809523864 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.03962135573486219, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.03962135573486219 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.64, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.64, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4479768786127168, - "acc_stderr": 0.026772990653361826, - "acc_norm": 0.4479768786127168, - "acc_norm_stderr": 0.026772990653361826 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.0277012284685426, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.0277012284685426 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46632124352331605, - "acc_stderr": 0.03600244069867178, - "acc_norm": 0.46632124352331605, - "acc_norm_stderr": 0.03600244069867178 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4990825688073395, - "acc_stderr": 0.021437287056051215, - "acc_norm": 0.4990825688073395, - "acc_norm_stderr": 0.021437287056051215 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4477124183006536, - "acc_stderr": 0.028472938478033522, - "acc_norm": 0.4477124183006536, - "acc_norm_stderr": 0.028472938478033522 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145634, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145634 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5867768595041323, - "acc_stderr": 0.04495087843548408, - "acc_norm": 0.5867768595041323, - "acc_norm_stderr": 0.04495087843548408 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - 
"acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.33169934640522875, - "acc_stderr": 0.01904748523936038, - "acc_norm": 0.33169934640522875, - "acc_norm_stderr": 0.01904748523936038 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.02812163604063988, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.02812163604063988 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2824074074074074, - "acc_stderr": 0.03070137211151092, - "acc_norm": 0.2824074074074074, - "acc_norm_stderr": 0.03070137211151092 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27205882352941174, - "acc_stderr": 0.027033041151681456, - "acc_norm": 0.27205882352941174, - "acc_norm_stderr": 0.027033041151681456 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.031976941187136725, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.031976941187136725 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5189873417721519, - "acc_stderr": 0.03252375148090447, - "acc_norm": 0.5189873417721519, - "acc_norm_stderr": 0.03252375148090447 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2966101694915254, - "acc_stderr": 0.011665946586082854, - "acc_norm": 0.2966101694915254, - "acc_norm_stderr": 0.011665946586082854 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.03465868196380757, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.03465868196380757 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0390369864774844, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0390369864774844 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.30599755201958384, - "mc1_stderr": 0.016132229728155062, - "mc2": 0.4746429594651757, - "mc2_stderr": 0.01531218992321956 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31690140845070425, - "acc_stderr": 0.01594920350879057, - "acc_norm": 0.3884976525821596, - "acc_norm_stderr": 0.01670815454631332 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - 
"harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/ko-llama2-v1", - "model_sha": "4253098940413125f8f0847038c076d42e5b2c59", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json b/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json deleted file mode 100644 index aa987de9aa07b1abc946b0b3ee60b792b7a4f84b..0000000000000000000000000000000000000000 --- a/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3626279863481229, - "acc_stderr": 0.014049106564955, - "acc_norm": 0.40187713310580203, - "acc_norm_stderr": 0.014327268614578273 - }, - "harness|ko_hellaswag|10": { - "acc": 0.41276638119896436, - "acc_stderr": 0.004913253031155685, - "acc_norm": 0.5246962756423024, - "acc_norm_stderr": 0.004983691099110914 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5380116959064327, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.5380116959064327, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5339805825242718, - "acc_stderr": 0.04939291447273481, - "acc_norm": 0.5339805825242718, - "acc_norm_stderr": 0.04939291447273481 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.01778403453499246, - "acc_norm": 
0.4482758620689655, - "acc_norm_stderr": 0.01778403453499246 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4085106382978723, - "acc_stderr": 0.03213418026701576, - "acc_norm": 0.4085106382978723, - "acc_norm_stderr": 0.03213418026701576 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.41479099678456594, - "acc_stderr": 0.02798268045975956, - "acc_norm": 0.41479099678456594, - "acc_norm_stderr": 0.02798268045975956 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.043749285605997376, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.043749285605997376 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.0354413249194797, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.0354413249194797 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419035, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419035 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4495798319327731, - "acc_stderr": 0.03231293497137707, - "acc_norm": 0.4495798319327731, - "acc_norm_stderr": 0.03231293497137707 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4153846153846154, - "acc_stderr": 0.024985354923102315, - "acc_norm": 0.4153846153846154, - "acc_norm_stderr": 0.024985354923102315 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.33497536945812806, - "acc_stderr": 0.033208527423483104, - "acc_norm": 0.33497536945812806, - "acc_norm_stderr": 0.033208527423483104 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4129032258064516, - "acc_stderr": 0.028009138125400387, - "acc_norm": 0.4129032258064516, - "acc_norm_stderr": 0.028009138125400387 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6538461538461539, - "acc_stderr": 0.031166957367235897, - "acc_norm": 0.6538461538461539, - "acc_norm_stderr": 0.031166957367235897 - }, - 
"harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.44528301886792454, - "acc_stderr": 0.030588052974270658, - "acc_norm": 0.44528301886792454, - "acc_norm_stderr": 0.030588052974270658 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145654, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145654 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5522388059701493, - "acc_stderr": 0.03516184772952166, - "acc_norm": 0.5522388059701493, - "acc_norm_stderr": 0.03516184772952166 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.036928207672648664, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.036928207672648664 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29894179894179895, - "acc_stderr": 0.02357760479165582, - "acc_norm": 0.29894179894179895, - "acc_norm_stderr": 0.02357760479165582 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.039621355734862175, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.039621355734862175 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.55, - "acc_stderr": 0.05, - "acc_norm": 0.55, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4161849710982659, - "acc_stderr": 0.026538189104705484, - "acc_norm": 0.4161849710982659, - "acc_norm_stderr": 0.026538189104705484 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44785276073619634, - "acc_stderr": 0.03906947479456601, - "acc_norm": 0.44785276073619634, - "acc_norm_stderr": 0.03906947479456601 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.027339546640662724, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.027339546640662724 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.44041450777202074, - "acc_stderr": 0.035827245300360945, - "acc_norm": 0.44041450777202074, - "acc_norm_stderr": 0.035827245300360945 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4954128440366973, - "acc_stderr": 0.021436420955529424, - "acc_norm": 0.4954128440366973, - "acc_norm_stderr": 0.021436420955529424 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795133, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795133 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.47058823529411764, - "acc_stderr": 0.028580341065138286, - "acc_norm": 0.47058823529411764, - "acc_norm_stderr": 0.028580341065138286 - }, - "harness|ko_mmlu_business_ethics|5": { - 
"acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6528925619834711, - "acc_stderr": 0.043457245702925335, - "acc_norm": 0.6528925619834711, - "acc_norm_stderr": 0.043457245702925335 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.018521756215423027, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.018521756215423027 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509314, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509314 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.15178571428571427, - "acc_stderr": 0.034057028381856924, - "acc_norm": 0.15178571428571427, - "acc_norm_stderr": 0.034057028381856924 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.033384734032074016, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.033384734032074016 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.02841820861940679, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.02841820861940679 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46122448979591835, - "acc_stderr": 0.031912820526692774, - "acc_norm": 0.46122448979591835, - "acc_norm_stderr": 0.031912820526692774 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5232067510548524, - "acc_stderr": 0.032512152011410174, - "acc_norm": 0.5232067510548524, - "acc_norm_stderr": 0.032512152011410174 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3070404172099087, - "acc_stderr": 0.011780959114513778, - "acc_norm": 0.3070404172099087, - "acc_norm_stderr": 0.011780959114513778 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4068627450980392, - "acc_stderr": 0.03447891136353382, - "acc_norm": 0.4068627450980392, - "acc_norm_stderr": 0.03447891136353382 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.46060606060606063, - "acc_stderr": 0.03892207016552013, - "acc_norm": 0.46060606060606063, - "acc_norm_stderr": 0.03892207016552013 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29498164014687883, - "mc1_stderr": 0.015964400965589674, - "mc2": 0.47296551445370655, - "mc2_stderr": 0.016489115600580966 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2664319248826291, - "acc_stderr": 0.015154742533365828, - "acc_norm": 0.2793427230046948, - "acc_norm_stderr": 0.015380423335680442 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - 
"harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/ko-llama2-v2", - "model_sha": "bfe6a2095cc43e82103cbdff36721810ef4057e3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json b/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json deleted file mode 100644 index 489742777ac3ac8176b34577099dee1ba18816e0..0000000000000000000000000000000000000000 --- a/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.39505119453924914, - "acc_stderr": 0.014285898292938169, - "acc_norm": 0.45051194539249145, - "acc_norm_stderr": 0.014539646098471625 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4231228838876718, - "acc_stderr": 0.0049304485271466575, - "acc_norm": 0.5584544911372237, - "acc_norm_stderr": 0.004955564650016177 - }, - "harness|ko_mmlu_world_religions|5": { - 
"acc": 0.5321637426900585, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.5321637426900585, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.017879948914431665, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.017879948914431665 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.42962962962962964, - "acc_stderr": 0.04276349494376599, - "acc_norm": 0.42962962962962964, - "acc_norm_stderr": 0.04276349494376599 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42168674698795183, - "acc_stderr": 0.03844453181770917, - "acc_norm": 0.42168674698795183, - "acc_norm_stderr": 0.03844453181770917 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.02834504586484068, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.02834504586484068 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.03252113489929187, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.03252113489929187 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5757575757575758, - "acc_stderr": 0.03521224908841586, - "acc_norm": 0.5757575757575758, - "acc_norm_stderr": 0.03521224908841586 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.43448275862068964, - "acc_stderr": 0.04130740879555498, - "acc_norm": 0.43448275862068964, - "acc_norm_stderr": 0.04130740879555498 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.042801058373643966, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.042801058373643966 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4369747899159664, - "acc_stderr": 0.03221943636566196, - "acc_norm": 0.4369747899159664, - "acc_norm_stderr": 0.03221943636566196 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.02491524398598784, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.02491524398598784 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5, - "acc_stderr": 0.04833682445228318, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04833682445228318 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - 
"acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45806451612903226, - "acc_stderr": 0.028343787250540636, - "acc_norm": 0.45806451612903226, - "acc_norm_stderr": 0.028343787250540636 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6367521367521367, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.6367521367521367, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4339622641509434, - "acc_stderr": 0.03050329201334259, - "acc_norm": 0.4339622641509434, - "acc_norm_stderr": 0.03050329201334259 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.02742001935094527, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.02742001935094527 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6019900497512438, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.6019900497512438, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.03669072477416906, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.03669072477416906 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.023919984164047736, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.023919984164047736 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4393063583815029, - "acc_stderr": 0.026720034380514995, - "acc_norm": 0.4393063583815029, - "acc_norm_stderr": 0.026720034380514995 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4233128834355828, - "acc_stderr": 0.03881891213334383, - "acc_norm": 0.4233128834355828, - "acc_norm_stderr": 0.03881891213334383 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.027716661650194045, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.027716661650194045 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46113989637305697, - "acc_stderr": 0.03597524411734578, - "acc_norm": 0.46113989637305697, - "acc_norm_stderr": 0.03597524411734578 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4954128440366973, - "acc_stderr": 0.021436420955529424, - "acc_norm": 0.4954128440366973, - "acc_norm_stderr": 
0.021436420955529424 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.028431095444176647, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.028431095444176647 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4276315789473684, - "acc_stderr": 0.04026097083296558, - "acc_norm": 0.4276315789473684, - "acc_norm_stderr": 0.04026097083296558 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.018771683893528186, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.018771683893528186 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028121636040639872, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028121636040639872 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.03114144782353604, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.03114144782353604 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.28308823529411764, - "acc_stderr": 0.0273658611315138, - "acc_norm": 0.28308823529411764, - "acc_norm_stderr": 0.0273658611315138 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5061224489795918, - "acc_stderr": 0.032006820201639065, - "acc_norm": 0.5061224489795918, - "acc_norm_stderr": 0.032006820201639065 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5063291139240507, - "acc_stderr": 0.0325446201076786, - "acc_norm": 0.5063291139240507, - "acc_norm_stderr": 0.0325446201076786 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3089960886571056, - "acc_stderr": 0.011801729777239246, - "acc_norm": 0.3089960886571056, - "acc_norm_stderr": 0.011801729777239246 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.03454236585380609, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.03454236585380609 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.03902551007374448, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.03902551007374448 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29498164014687883, - "mc1_stderr": 0.015964400965589678, - "mc2": 0.46187837195291875, - "mc2_stderr": 
0.015227305019069102 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.32629107981220656, - "acc_stderr": 0.016072149717140726, - "acc_norm": 0.3884976525821596, - "acc_norm_stderr": 0.01670815454631332 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/ko-llama2-v3", - "model_sha": "277462786fe73ea1b6f50d5e45ee1be5854611a1", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json b/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json deleted file mode 100644 index 5df72d525933008e92f75a6c519cffe2518168a6..0000000000000000000000000000000000000000 --- a/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json +++ /dev/null @@ -1,444 +0,0 
@@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28498293515358364, - "acc_stderr": 0.013191348179838792, - "acc_norm": 0.3174061433447099, - "acc_norm_stderr": 0.01360223908803817 - }, - "harness|ko_hellaswag|10": { - "acc": 0.371539533957379, - "acc_stderr": 0.004822286556305217, - "acc_norm": 0.4738099980083649, - "acc_norm_stderr": 0.004982931565945953 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.18128654970760233, - "acc_stderr": 0.029547741687640024, - "acc_norm": 0.18128654970760233, - "acc_norm_stderr": 0.029547741687640024 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.36893203883495146, - "acc_stderr": 0.047776151811567386, - "acc_norm": 0.36893203883495146, - "acc_norm_stderr": 0.047776151811567386 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20689655172413793, - "acc_stderr": 0.014485656041669173, - "acc_norm": 0.20689655172413793, - "acc_norm_stderr": 0.014485656041669173 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.037125378336148665, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.037125378336148665 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838746, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838746 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511115, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 0.03529486801511115 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2379421221864952, - "acc_stderr": 0.024185150647818707, - "acc_norm": 0.2379421221864952, - "acc_norm_stderr": 0.024185150647818707 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.15246636771300448, - "acc_stderr": 0.024126204813252863, - "acc_norm": 0.15246636771300448, - "acc_norm_stderr": 0.024126204813252863 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.03915345408847836, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.03915345408847836 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.033184773338453315, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.033184773338453315 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135303, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135303 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.04755129616062947, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.04755129616062947 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.03086868260412162, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.03086868260412162 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.024359581465396983, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.024359581465396983 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - 
"harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.038935425188248475, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.038935425188248475 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3096774193548387, - "acc_stderr": 0.026302774983517418, - "acc_norm": 0.3096774193548387, - "acc_norm_stderr": 0.026302774983517418 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.02704685763071668, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.02704685763071668 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493864, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493864 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.04122066502878284, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.04122066502878284 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712166, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712166 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.03802039760107903, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.03802039760107903 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.03115715086935557, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.03115715086935557 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736412, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736412 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.02278967314577656, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.02278967314577656 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.03351953879521271, - "acc_norm": 0.2392638036809816, - "acc_norm_stderr": 0.03351953879521271 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.23765432098765432, - "acc_stderr": 0.023683591837008553, - "acc_norm": 0.23765432098765432, - "acc_norm_stderr": 0.023683591837008553 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - 
"acc": 0.3626943005181347, - "acc_stderr": 0.03469713791704372, - "acc_norm": 0.3626943005181347, - "acc_norm_stderr": 0.03469713791704372 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.040493392977481404, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.040493392977481404 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3541284403669725, - "acc_stderr": 0.020504729013829104, - "acc_norm": 0.3541284403669725, - "acc_norm_stderr": 0.020504729013829104 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.02582916327275748, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.02582916327275748 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036622, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036622 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.14049586776859505, - "acc_stderr": 0.031722334260021585, - "acc_norm": 0.14049586776859505, - "acc_norm_stderr": 0.031722334260021585 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.0378272898086547, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.0378272898086547 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.026011992930902013, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.026011992930902013 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.034859460964757394, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.034859460964757394 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.033953227263757976, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.033953227263757976 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.030161911930767102, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.030161911930767102 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.21940928270042195, - "acc_stderr": 0.026939106581553945, - "acc_norm": 0.21940928270042195, - "acc_norm_stderr": 0.026939106581553945 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2438070404172099, - "acc_stderr": 0.010966507972178475, - "acc_norm": 0.2438070404172099, - "acc_norm_stderr": 0.010966507972178475 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 
0.24509803921568626, - "acc_stderr": 0.03019028245350194, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.03019028245350194 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26560587515299877, - "mc1_stderr": 0.015461027627253595, - "mc2": 0.40641374284087445, - "mc2_stderr": 0.014952562897051682 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3720657276995305, - "acc_stderr": 0.016569223163823556, - "acc_norm": 0.4424882629107981, - "acc_norm_stderr": 0.01702601866298503 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jiwoochris/polyglot_350", - "model_sha": "9fb5a66197344b0ec71467e384620bd610668339", - "model_dtype": "torch.float16", - 
"lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json b/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json deleted file mode 100644 index 7807984f3ba352b4f7456033448ba8b6e218e09b..0000000000000000000000000000000000000000 --- a/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28924914675767915, - "acc_stderr": 0.013250012579393443, - "acc_norm": 0.3293515358361775, - "acc_norm_stderr": 0.013734057652635474 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3754232224656443, - "acc_stderr": 0.004832423630593185, - "acc_norm": 0.48028281218880703, - "acc_norm_stderr": 0.004985900172317694 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.36893203883495146, - "acc_stderr": 0.04777615181156739, - "acc_norm": 0.36893203883495146, - "acc_norm_stderr": 0.04777615181156739 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20434227330779056, - "acc_stderr": 0.014419123980931904, - "acc_norm": 0.20434227330779056, - "acc_norm_stderr": 0.014419123980931904 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838746, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838746 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.030709824050565264, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.030709824050565264 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.09865470852017937, - "acc_stderr": 0.020013729184919227, - "acc_norm": 0.09865470852017937, - "acc_norm_stderr": 0.020013729184919227 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3383838383838384, - "acc_stderr": 0.033711241426263014, - "acc_norm": 0.3383838383838384, - "acc_norm_stderr": 0.033711241426263014 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.23448275862068965, - "acc_stderr": 0.035306258743465914, - "acc_norm": 0.23448275862068965, - "acc_norm_stderr": 0.035306258743465914 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.04784060704105653, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.04784060704105653 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 
0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36923076923076925, - "acc_stderr": 0.02446861524147891, - "acc_norm": 0.36923076923076925, - "acc_norm_stderr": 0.02446861524147891 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.031447125816782426, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.031447125816782426 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493864, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493864 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.03861557546255168, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.03861557546255168 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2736318407960199, - "acc_stderr": 0.031524391865554016, - "acc_norm": 0.2736318407960199, - "acc_norm_stderr": 0.031524391865554016 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.02201908001221789, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.02201908001221789 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 
0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2191358024691358, - "acc_stderr": 0.023016705640262206, - "acc_norm": 0.2191358024691358, - "acc_norm_stderr": 0.023016705640262206 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.040493392977481404, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.040493392977481404 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3504587155963303, - "acc_stderr": 0.02045607759982446, - "acc_norm": 0.3504587155963303, - "acc_norm_stderr": 0.02045607759982446 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3412698412698413, - "acc_stderr": 0.04240799327574924, - "acc_norm": 0.3412698412698413, - "acc_norm_stderr": 0.04240799327574924 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2973856209150327, - "acc_stderr": 0.026173908506718576, - "acc_norm": 0.2973856209150327, - "acc_norm_stderr": 0.026173908506718576 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.21487603305785125, - "acc_stderr": 0.037494924487096966, - "acc_norm": 0.21487603305785125, - "acc_norm_stderr": 0.037494924487096966 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.03842498559395268, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.03842498559395268 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953776, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953776 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.17857142857142858, - "acc_stderr": 0.036352091215778065, - "acc_norm": 0.17857142857142858, - "acc_norm_stderr": 0.036352091215778065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 
0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20253164556962025, - "acc_stderr": 0.026160568246601464, - "acc_norm": 0.20253164556962025, - "acc_norm_stderr": 0.026160568246601464 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.242503259452412, - "acc_stderr": 0.010946570966348775, - "acc_norm": 0.242503259452412, - "acc_norm_stderr": 0.010946570966348775 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.03427743175816524, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.03427743175816524 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.015528566637087298, - "mc2": 0.41570723548070315, - "mc2_stderr": 0.014870707305351522 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.18309859154929578, - "acc_stderr": 0.013257527729799864, - "acc_norm": 0.2323943661971831, - "acc_norm_stderr": 0.014478284105610304 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - 
"harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jojo0217/ChatSKKU5.8B", - "model_sha": "49cb4be98cae4dfd6560920be15ce3ca82c29026", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json b/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json deleted file mode 100644 index abf4c82c91a49f54393423dcb11ffea4619d6a19..0000000000000000000000000000000000000000 --- a/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3626279863481229, - "acc_stderr": 0.014049106564955005, - "acc_norm": 0.40955631399317405, - "acc_norm_stderr": 0.014370358632472451 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4001194981079466, - "acc_stderr": 0.004889210628907952, - "acc_norm": 0.5324636526588329, - "acc_norm_stderr": 0.0049792529549773125 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5087719298245614, - "acc_stderr": 0.038342347441649924, - "acc_norm": 0.5087719298245614, - "acc_norm_stderr": 0.038342347441649924 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4955300127713921, - "acc_stderr": 0.017879248970584388, - "acc_norm": 0.4955300127713921, - "acc_norm_stderr": 0.017879248970584388 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4, - "acc_stderr": 0.04232073695151589, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04232073695151589 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33617021276595743, - "acc_stderr": 0.030881618520676942, - "acc_norm": 0.33617021276595743, - "acc_norm_stderr": 0.030881618520676942 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.0362933532994786, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.0362933532994786 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4758842443729904, - "acc_stderr": 0.02836504154256457, - "acc_norm": 0.4758842443729904, - "acc_norm_stderr": 0.02836504154256457 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.03252113489929187, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.03252113489929187 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.37373737373737376, - "acc_stderr": 0.034468977386593325, - "acc_norm": 0.37373737373737376, - "acc_norm_stderr": 0.034468977386593325 - }, - 
"harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.04104269211806232, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.04104269211806232 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.031357095996135904, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.031357095996135904 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.38461538461538464, - "acc_stderr": 0.024666744915187236, - "acc_norm": 0.38461538461538464, - "acc_norm_stderr": 0.024666744915187236 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.35, - "acc_stderr": 0.04793724854411018, - "acc_norm": 0.35, - "acc_norm_stderr": 0.04793724854411018 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4483870967741935, - "acc_stderr": 0.02829205683011274, - "acc_norm": 0.4483870967741935, - "acc_norm_stderr": 0.02829205683011274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5854700854700855, - "acc_stderr": 0.03227396567623779, - "acc_norm": 0.5854700854700855, - "acc_norm_stderr": 0.03227396567623779 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.46037735849056605, - "acc_stderr": 0.030676096599389174, - "acc_norm": 0.46037735849056605, - "acc_norm_stderr": 0.030676096599389174 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712173, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712173 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5124378109452736, - "acc_stderr": 0.035344398485395785, - "acc_norm": 0.5124378109452736, - "acc_norm_stderr": 0.035344398485395785 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.03669072477416906, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.03669072477416906 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.023456037383982026, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.023456037383982026 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.039420826399272135, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.039420826399272135 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4161849710982659, - "acc_stderr": 0.026538189104705488, - "acc_norm": 0.4161849710982659, - "acc_norm_stderr": 0.026538189104705488 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.37423312883435583, - "acc_stderr": 0.03802068102899616, - "acc_norm": 0.37423312883435583, - "acc_norm_stderr": 0.03802068102899616 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.44135802469135804, - "acc_stderr": 0.027628737155668777, - "acc_norm": 0.44135802469135804, - "acc_norm_stderr": 0.027628737155668777 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.45077720207253885, - "acc_stderr": 0.03590910952235524, - "acc_norm": 0.45077720207253885, - "acc_norm_stderr": 0.03590910952235524 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.43486238532110094, - "acc_stderr": 0.02125463146560928, - "acc_norm": 0.43486238532110094, - "acc_norm_stderr": 0.02125463146560928 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04006168083848878, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04006168083848878 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3954248366013072, - "acc_stderr": 0.027996723180631455, - "acc_norm": 0.3954248366013072, - "acc_norm_stderr": 0.027996723180631455 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.0498887651569859, - "acc_norm": 0.44, - "acc_norm_stderr": 0.0498887651569859 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5785123966942148, - "acc_stderr": 0.04507732278775087, - "acc_norm": 0.5785123966942148, - "acc_norm_stderr": 0.04507732278775087 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336284, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336284 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3415032679738562, - "acc_stderr": 0.019184639328092484, - "acc_norm": 0.3415032679738562, - "acc_norm_stderr": 0.019184639328092484 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.027889139300534774, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.027889139300534774 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25, - "acc_stderr": 0.029531221160930918, - "acc_norm": 0.25, - "acc_norm_stderr": 0.029531221160930918 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.42, - "acc_stderr": 
0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.25, - "acc_stderr": 0.026303648393696036, - "acc_norm": 0.25, - "acc_norm_stderr": 0.026303648393696036 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3836734693877551, - "acc_stderr": 0.03113088039623593, - "acc_norm": 0.3836734693877551, - "acc_norm_stderr": 0.03113088039623593 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.540084388185654, - "acc_stderr": 0.03244246810187913, - "acc_norm": 0.540084388185654, - "acc_norm_stderr": 0.03244246810187913 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31486310299869624, - "acc_stderr": 0.011862561755715931, - "acc_norm": 0.31486310299869624, - "acc_norm_stderr": 0.011862561755715931 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.03465868196380758, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.03465868196380758 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.038956580652718446, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.038956580652718446 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2913096695226438, - "mc1_stderr": 0.015905987048184828, - "mc2": 0.45413657999042506, - "mc2_stderr": 0.015074046336424325 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6889671361502347, - "acc_stderr": 0.015868563452870778, - "acc_norm": 0.7535211267605634, - "acc_norm_stderr": 0.014773139084466522 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - 
"harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jyoung105/KoR-Orca-Platypus-13B-neft", - "model_sha": "a02ee5b06d952c0dc23f5868d59778638696ebfd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json b/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json deleted file mode 100644 index 526344303b3554954b08cf721e360cb375bcac43..0000000000000000000000000000000000000000 --- a/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3856655290102389, - "acc_stderr": 0.014224250973257182, - "acc_norm": 0.44283276450511944, - "acc_norm_stderr": 0.014515573873348906 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40798645688109936, - "acc_stderr": 0.004904561795919, - "acc_norm": 0.5428201553475404, - "acc_norm_stderr": 0.004971449552787176 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5261813537675607, - "acc_stderr": 0.017855434554042, - "acc_norm": 0.5261813537675607, - "acc_norm_stderr": 0.017855434554042 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04292596718256981, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04292596718256981 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4533762057877814, - "acc_stderr": 0.02827435985489424, - "acc_norm": 0.4533762057877814, - "acc_norm_stderr": 0.02827435985489424 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.42152466367713004, - "acc_stderr": 0.03314190222110655, - "acc_norm": 0.42152466367713004, - "acc_norm_stderr": 0.03314190222110655 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 
0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4797979797979798, - "acc_stderr": 0.03559443565563919, - "acc_norm": 0.4797979797979798, - "acc_norm_stderr": 0.03559443565563919 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.04533838195929776, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.04533838195929776 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42016806722689076, - "acc_stderr": 0.032061837832361516, - "acc_norm": 0.42016806722689076, - "acc_norm_stderr": 0.032061837832361516 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4230769230769231, - "acc_stderr": 0.025049197876042328, - "acc_norm": 0.4230769230769231, - "acc_norm_stderr": 0.025049197876042328 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.53, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.53, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437056, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437056 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.034139638059062345, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.034139638059062345 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.028272410186214906, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.028272410186214906 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6068376068376068, - "acc_stderr": 0.03199957924651048, - "acc_norm": 0.6068376068376068, - "acc_norm_stderr": 0.03199957924651048 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4377358490566038, - "acc_stderr": 0.03053333843046751, - "acc_norm": 0.4377358490566038, - "acc_norm_stderr": 0.03053333843046751 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5727272727272728, - "acc_stderr": 0.04738198703545483, - "acc_norm": 0.5727272727272728, - "acc_norm_stderr": 0.04738198703545483 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945273, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945273 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.03714325906302065, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302065 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29365079365079366, - "acc_stderr": 
0.023456037383982026, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.023456037383982026 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4797687861271676, - "acc_stderr": 0.026897049996382875, - "acc_norm": 0.4797687861271676, - "acc_norm_stderr": 0.026897049996382875 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.027744313443376536, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.027744313443376536 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384488, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384488 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5339449541284403, - "acc_stderr": 0.021387863350353985, - "acc_norm": 0.5339449541284403, - "acc_norm_stderr": 0.021387863350353985 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871136, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871136 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.028431095444176643, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.028431095444176643 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6198347107438017, - "acc_stderr": 0.04431324501968432, - "acc_norm": 0.6198347107438017, - "acc_norm_stderr": 0.04431324501968432 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490437, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490437 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.34967320261437906, - "acc_stderr": 0.019291961895066382, - "acc_norm": 0.34967320261437906, - "acc_norm_stderr": 0.019291961895066382 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.02826765748265014, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.02826765748265014 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 
0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2547486033519553, - "acc_stderr": 0.014572650383409146, - "acc_norm": 0.2547486033519553, - "acc_norm_stderr": 0.014572650383409146 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.030042615832714864, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.030042615832714864 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46530612244897956, - "acc_stderr": 0.03193207024425314, - "acc_norm": 0.46530612244897956, - "acc_norm_stderr": 0.03193207024425314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3728813559322034, - "acc_stderr": 0.012350630058333362, - "acc_norm": 0.3728813559322034, - "acc_norm_stderr": 0.012350630058333362 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.46078431372549017, - "acc_stderr": 0.03498501649369527, - "acc_norm": 0.46078431372549017, - "acc_norm_stderr": 0.03498501649369527 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006514, - "mc2": 0.4442744883801461, - "mc2_stderr": 0.015229595169585636 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4518779342723005, - "acc_stderr": 0.017060212258103228, - "acc_norm": 0.5422535211267606, - "acc_norm_stderr": 0.017078468242201064 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jyoung105/ko-platypus2-collective-13b", - "model_sha": "a42bdc7082f08920ee23b5ed9946aa81008de332", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json b/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json deleted file mode 100644 index 797bb7a786aba97ff56e85147a55e7ffe45a6cc3..0000000000000000000000000000000000000000 --- a/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.38054607508532423, - "acc_stderr": 0.014188277712349812, - "acc_norm": 0.4453924914675768, - "acc_norm_stderr": 0.014523987638344078 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4084843656642103, - "acc_stderr": 0.0049054894940050746, - "acc_norm": 0.5414260107548298, - "acc_norm_stderr": 0.0049726258487026555 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5223499361430396, - "acc_stderr": 0.017862091778507855, - "acc_norm": 0.5223499361430396, - "acc_norm_stderr": 0.017862091778507855 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.42962962962962964, - "acc_stderr": 0.04276349494376599, - "acc_norm": 0.42962962962962964, - "acc_norm_stderr": 0.04276349494376599 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3493975903614458, - "acc_stderr": 
0.037117251907407486, - "acc_norm": 0.3493975903614458, - "acc_norm_stderr": 0.037117251907407486 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4662379421221865, - "acc_stderr": 0.028333277109562804, - "acc_norm": 0.4662379421221865, - "acc_norm_stderr": 0.028333277109562804 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4080717488789238, - "acc_stderr": 0.03298574607842821, - "acc_norm": 0.4080717488789238, - "acc_norm_stderr": 0.03298574607842821 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48091603053435117, - "acc_stderr": 0.04382094705550989, - "acc_norm": 0.48091603053435117, - "acc_norm_stderr": 0.04382094705550989 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.48484848484848486, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.045766654032077636, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.045766654032077636 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42016806722689076, - "acc_stderr": 0.032061837832361516, - "acc_norm": 0.42016806722689076, - "acc_norm_stderr": 0.032061837832361516 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4282051282051282, - "acc_stderr": 0.02508830145469484, - "acc_norm": 0.4282051282051282, - "acc_norm_stderr": 0.02508830145469484 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.52, - "acc_stderr": 0.05021167315686779, - "acc_norm": 0.52, - "acc_norm_stderr": 0.05021167315686779 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5, - "acc_stderr": 0.04833682445228318, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04833682445228318 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998575, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45483870967741935, - "acc_stderr": 0.02832774309156106, - "acc_norm": 0.45483870967741935, - "acc_norm_stderr": 0.02832774309156106 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.594017094017094, - "acc_stderr": 0.03217180182641087, - "acc_norm": 0.594017094017094, - "acc_norm_stderr": 0.03217180182641087 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.44528301886792454, - "acc_stderr": 0.030588052974270655, - "acc_norm": 0.44528301886792454, - "acc_norm_stderr": 0.030588052974270655 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5363636363636364, - "acc_stderr": 0.047764491623961985, - "acc_norm": 0.5363636363636364, - "acc_norm_stderr": 0.047764491623961985 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945277, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945277 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 
0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5771144278606966, - "acc_stderr": 0.034932317774212816, - "acc_norm": 0.5771144278606966, - "acc_norm_stderr": 0.034932317774212816 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3988439306358382, - "acc_stderr": 0.03733626655383509, - "acc_norm": 0.3988439306358382, - "acc_norm_stderr": 0.03733626655383509 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.023456037383982026, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.023456037383982026 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.040329990539607195, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.040329990539607195 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4682080924855491, - "acc_stderr": 0.026864624366756643, - "acc_norm": 0.4682080924855491, - "acc_norm_stderr": 0.026864624366756643 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.038890666191127216, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.038890666191127216 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.027716661650194048, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.027716661650194048 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.03608003225569653, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.03608003225569653 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5339449541284403, - "acc_stderr": 0.021387863350353982, - "acc_norm": 0.5339449541284403, - "acc_norm_stderr": 0.021387863350353982 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43790849673202614, - "acc_stderr": 0.028408302020332687, - "acc_norm": 0.43790849673202614, - "acc_norm_stderr": 0.028408302020332687 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.040335656678483184, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.040335656678483184 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3545751633986928, - "acc_stderr": 0.019353360547553704, - "acc_norm": 0.3545751633986928, - "acc_norm_stderr": 
0.019353360547553704 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3475177304964539, - "acc_stderr": 0.028406627809590947, - "acc_norm": 0.3475177304964539, - "acc_norm_stderr": 0.028406627809590947 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.03293377139415191, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.03293377139415191 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2659217877094972, - "acc_stderr": 0.014776765066438885, - "acc_norm": 0.2659217877094972, - "acc_norm_stderr": 0.014776765066438885 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.030008562845003476, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.030008562845003476 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.031976941187136725, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.031976941187136725 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.37222946544980445, - "acc_stderr": 0.012346241297204368, - "acc_norm": 0.37222946544980445, - "acc_norm_stderr": 0.012346241297204368 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.034849415144292316, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.034849415144292316 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.03903698647748441, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.03903698647748441 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29008567931456547, - "mc1_stderr": 0.01588623687420952, - "mc2": 0.4598250500026081, - "mc2_stderr": 0.01526561900775728 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.40492957746478875, - "acc_stderr": 0.016827095223977993, - "acc_norm": 0.4835680751173709, - "acc_norm_stderr": 0.017130520993936017 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "jyoung105/ko-platypus2-collective-13b_v1.1", - "model_sha": "ccb5e5262ad155e445b27ca11c6bb946f56fc4d1", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json b/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json deleted file mode 100644 index 456866382e53efb70dda5cf6d852594a6612a283..0000000000000000000000000000000000000000 --- a/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32593856655290104, - "acc_stderr": 0.013697432466693242, - "acc_norm": 0.3839590443686007, - "acc_norm_stderr": 0.01421244498065189 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3875721967735511, - "acc_stderr": 0.004862003566798545, - "acc_norm": 0.504779924317865, - "acc_norm_stderr": 0.004989553396413091 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.033773102522091945, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.033773102522091945 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3499361430395913, - "acc_stderr": 0.017055679797150423, - "acc_norm": 0.3499361430395913, - "acc_norm_stderr": 0.017055679797150423 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.0402477840197711, - "acc_norm": 0.31851851851851853, - 
"acc_norm_stderr": 0.0402477840197711 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.03115852213135778, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.03115852213135778 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.037891344246115496, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.037891344246115496 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2958199356913183, - "acc_stderr": 0.02592237178881879, - "acc_norm": 0.2958199356913183, - "acc_norm_stderr": 0.02592237178881879 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.032443052830087304, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.032443052830087304 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3511450381679389, - "acc_stderr": 0.04186445163013751, - "acc_norm": 0.3511450381679389, - "acc_norm_stderr": 0.04186445163013751 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3787878787878788, - "acc_stderr": 0.03456088731993747, - "acc_norm": 0.3787878787878788, - "acc_norm_stderr": 0.03456088731993747 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03724563619774632, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03724563619774632 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617746, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617746 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.028657491285071966, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.028657491285071966 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2743589743589744, - "acc_stderr": 0.022622765767493197, - "acc_norm": 0.2743589743589744, - "acc_norm_stderr": 0.022622765767493197 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.044143436668549335, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.044143436668549335 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868408, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868408 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466777, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466777 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.030882736974138653, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.030882736974138653 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32075471698113206, - "acc_stderr": 0.028727502957880267, - "acc_norm": 0.32075471698113206, - "acc_norm_stderr": 0.028727502957880267 - }, - 
"harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.04494290866252088, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.04494290866252088 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.02549753263960955, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.02549753263960955 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119995, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119995 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.03115715086935557, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.03115715086935557 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.0326926380614177, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.0326926380614177 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.02241804289111394, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.02241804289111394 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.19444444444444445, - "acc_stderr": 0.03309615177059006, - "acc_norm": 0.19444444444444445, - "acc_norm_stderr": 0.03309615177059006 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2976878612716763, - "acc_stderr": 0.024617055388677003, - "acc_norm": 0.2976878612716763, - "acc_norm_stderr": 0.024617055388677003 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868052, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868052 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23316062176165803, - "acc_stderr": 0.030516111371476008, - "acc_norm": 0.23316062176165803, - "acc_norm_stderr": 0.030516111371476008 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3688073394495413, - "acc_stderr": 0.02068622756072955, - "acc_norm": 0.3688073394495413, - "acc_norm_stderr": 0.02068622756072955 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604672, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604672 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.33986928104575165, - "acc_stderr": 0.027121956071388852, - "acc_norm": 0.33986928104575165, - "acc_norm_stderr": 0.027121956071388852 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542126, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542126 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 
0.34710743801652894, - "acc_stderr": 0.04345724570292534, - "acc_norm": 0.34710743801652894, - "acc_norm_stderr": 0.04345724570292534 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.038035102483515854, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.038035102483515854 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.017630827375148383, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.017630827375148383 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.026358065698880592, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.026358065698880592 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.030851992993257017, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.030851992993257017 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4375, - "acc_stderr": 0.030134614954403924, - "acc_norm": 0.4375, - "acc_norm_stderr": 0.030134614954403924 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.02866685779027465, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.02866685779027465 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3291139240506329, - "acc_stderr": 0.030587326294702368, - "acc_norm": 0.3291139240506329, - "acc_norm_stderr": 0.030587326294702368 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26988265971316816, - "acc_stderr": 0.011337381084250411, - "acc_norm": 0.26988265971316816, - "acc_norm_stderr": 0.011337381084250411 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.034277431758165236, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.034277431758165236 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2350061199510404, - "mc1_stderr": 0.014843061507731613, - "mc2": 0.3670922997204656, - "mc2_stderr": 0.014677148528936845 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4213615023474178, - "acc_stderr": 0.016926466620431478, - "acc_norm": 0.5234741784037559, - "acc_norm_stderr": 0.01712087952772565 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - 
"harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kfkas/Llama-2-ko-7b-Chat", - "model_sha": "6d94c8e5b34fb09e80601548761a8dbd54bc0bba", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json b/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json deleted file mode 100644 index fbc27ad46c6fe7830088c651ed6037da8e39e517..0000000000000000000000000000000000000000 --- a/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3660409556313993, - "acc_stderr": 0.01407722310847014, - "acc_norm": 0.42150170648464164, - "acc_norm_stderr": 0.014430197069326021 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4064927305317666, - "acc_stderr": 0.004901747426331732, - "acc_norm": 0.5430193188607847, - "acc_norm_stderr": 0.004971278309204197 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5614035087719298, - "acc_stderr": 0.038057975055904594, - "acc_norm": 0.5614035087719298, - "acc_norm_stderr": 0.038057975055904594 - }, - 
"harness|ko_mmlu_management|5": { - "acc": 0.4854368932038835, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.4854368932038835, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.48020434227330777, - "acc_stderr": 0.017865944827291615, - "acc_norm": 0.48020434227330777, - "acc_norm_stderr": 0.017865944827291615 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.37446808510638296, - "acc_stderr": 0.031639106653672915, - "acc_norm": 0.37446808510638296, - "acc_norm_stderr": 0.031639106653672915 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.038284011150790206, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.038284011150790206 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.028345045864840684, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.028345045864840684 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.032100621541349864, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.032100621541349864 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.035578062450873145, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.035578062450873145 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.032104790510157764, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.032104790510157764 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.02491524398598784, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.02491524398598784 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.0478034362693679, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.0478034362693679 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998575, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4096774193548387, - 
"acc_stderr": 0.027976054915347354, - "acc_norm": 0.4096774193548387, - "acc_norm_stderr": 0.027976054915347354 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6239316239316239, - "acc_stderr": 0.03173393632969481, - "acc_norm": 0.6239316239316239, - "acc_norm_stderr": 0.03173393632969481 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4641509433962264, - "acc_stderr": 0.030693675018458003, - "acc_norm": 0.4641509433962264, - "acc_norm_stderr": 0.030693675018458003 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.04724577405731572, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.04724577405731572 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5124378109452736, - "acc_stderr": 0.03534439848539579, - "acc_norm": 0.5124378109452736, - "acc_norm_stderr": 0.03534439848539579 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.03614665424180826, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.03614665424180826 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.023000086859068642, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.023000086859068642 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4190751445086705, - "acc_stderr": 0.02656417811142262, - "acc_norm": 0.4190751445086705, - "acc_norm_stderr": 0.02656417811142262 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.0277012284685426, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.0277012284685426 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46788990825688076, - "acc_stderr": 0.021393071222680814, - "acc_norm": 0.46788990825688076, - "acc_norm_stderr": 0.021393071222680814 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 
0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.02827549015679143, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.02827549015679143 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5619834710743802, - "acc_stderr": 0.04529146804435792, - "acc_norm": 0.5619834710743802, - "acc_norm_stderr": 0.04529146804435792 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.0403356566784832, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.0403356566784832 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.018521756215423027, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.018521756215423027 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509314, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509314 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2610294117647059, - "acc_stderr": 0.02667925227010311, - "acc_norm": 0.2610294117647059, - "acc_norm_stderr": 0.02667925227010311 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.45714285714285713, - "acc_stderr": 0.03189141832421397, - "acc_norm": 0.45714285714285713, - "acc_norm_stderr": 0.03189141832421397 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3011734028683181, - "acc_stderr": 0.011717148751648435, - "acc_norm": 0.3011734028683181, - "acc_norm_stderr": 0.011717148751648435 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.038881769216741004, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.038881769216741004 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.40735838259254725, - "mc2_stderr": 0.0148987552825206 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4765258215962441, - "acc_stderr": 
0.017120879527725653, - "acc_norm": 0.573943661971831, - "acc_norm_stderr": 0.016951313945591816 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v2", - "model_sha": "d050d876d84bdce99f417f180479586cf0fe8a86", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json b/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json deleted file mode 100644 index 5c6bb66212c04c4adec43f19bbda31dcca37d296..0000000000000000000000000000000000000000 --- a/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - 
"harness|ko_arc_challenge|25": { - "acc": 0.37457337883959047, - "acc_stderr": 0.014144193471893456, - "acc_norm": 0.43430034129692835, - "acc_norm_stderr": 0.014484703048857355 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4070902210714997, - "acc_stderr": 0.004902878806733046, - "acc_norm": 0.5408285202150966, - "acc_norm_stderr": 0.004973117975062488 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5380116959064327, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.5380116959064327, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458935, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458935 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5197956577266922, - "acc_stderr": 0.017865944827291633, - "acc_norm": 0.5197956577266922, - "acc_norm_stderr": 0.017865944827291633 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.43703703703703706, - "acc_stderr": 0.04284958639753399, - "acc_norm": 0.43703703703703706, - "acc_norm_stderr": 0.04284958639753399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206824, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206824 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.03106898596312215, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.03106898596312215 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.038284011150790206, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.038284011150790206 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5241157556270096, - "acc_stderr": 0.028365041542564584, - "acc_norm": 0.5241157556270096, - "acc_norm_stderr": 0.028365041542564584 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4170403587443946, - "acc_stderr": 0.03309266936071721, - "acc_norm": 0.4170403587443946, - "acc_norm_stderr": 0.03309266936071721 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5114503816793893, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.5114503816793893, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.035476014940069384, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.035476014940069384 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.47586206896551725, - "acc_stderr": 0.041618085035015295, - "acc_norm": 0.47586206896551725, - "acc_norm_stderr": 0.041618085035015295 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149352, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149352 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42016806722689076, - "acc_stderr": 0.03206183783236153, - "acc_norm": 0.42016806722689076, - "acc_norm_stderr": 0.03206183783236153 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4153846153846154, - "acc_stderr": 0.024985354923102318, - "acc_norm": 0.4153846153846154, - "acc_norm_stderr": 0.024985354923102318 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.38, - 
"acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3497536945812808, - "acc_stderr": 0.03355400904969566, - "acc_norm": 0.3497536945812808, - "acc_norm_stderr": 0.03355400904969566 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4612903225806452, - "acc_stderr": 0.028358634859836914, - "acc_norm": 0.4612903225806452, - "acc_norm_stderr": 0.028358634859836914 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6495726495726496, - "acc_stderr": 0.0312561082442188, - "acc_norm": 0.6495726495726496, - "acc_norm_stderr": 0.0312561082442188 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4528301886792453, - "acc_stderr": 0.030635627957961823, - "acc_norm": 0.4528301886792453, - "acc_norm_stderr": 0.030635627957961823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.027738969632176095, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.027738969632176095 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.03742461193887248, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.03742461193887248 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30687830687830686, - "acc_stderr": 0.023752928712112126, - "acc_norm": 0.30687830687830686, - "acc_norm_stderr": 0.023752928712112126 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3958333333333333, - "acc_stderr": 0.04089465449325583, - "acc_norm": 0.3958333333333333, - "acc_norm_stderr": 0.04089465449325583 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.7, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.7, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.48265895953757226, - "acc_stderr": 0.02690290045866664, - "acc_norm": 0.48265895953757226, - "acc_norm_stderr": 0.02690290045866664 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.027801656212323667, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.027801656212323667 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5595854922279793, - "acc_stderr": 
0.03582724530036094, - "acc_norm": 0.5595854922279793, - "acc_norm_stderr": 0.03582724530036094 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21929824561403508, - "acc_stderr": 0.03892431106518753, - "acc_norm": 0.21929824561403508, - "acc_norm_stderr": 0.03892431106518753 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5394495412844037, - "acc_stderr": 0.021370494609995096, - "acc_norm": 0.5394495412844037, - "acc_norm_stderr": 0.021370494609995096 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795133, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795133 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4542483660130719, - "acc_stderr": 0.028509807802626567, - "acc_norm": 0.4542483660130719, - "acc_norm_stderr": 0.028509807802626567 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.040403110624904356, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.040403110624904356 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.019270998708223974, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.019270998708223974 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35815602836879434, - "acc_stderr": 0.02860208586275942, - "acc_norm": 0.35815602836879434, - "acc_norm_stderr": 0.02860208586275942 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952688, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952688 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.03099866630456053, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.03099866630456053 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 0.014288343803925295, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.014288343803925295 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3639705882352941, - "acc_stderr": 0.02922719246003203, - "acc_norm": 0.3639705882352941, - "acc_norm_stderr": 0.02922719246003203 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5949367088607594, - "acc_stderr": 0.031955147413706725, - "acc_norm": 0.5949367088607594, - "acc_norm_stderr": 0.031955147413706725 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.35919165580182527, - "acc_stderr": 0.012253386187584245, - "acc_norm": 0.35919165580182527, - "acc_norm_stderr": 0.012253386187584245 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.5049019607843137, - 
"acc_stderr": 0.03509143375606786, - "acc_norm": 0.5049019607843137, - "acc_norm_stderr": 0.03509143375606786 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.503030303030303, - "acc_stderr": 0.03904272341431856, - "acc_norm": 0.503030303030303, - "acc_norm_stderr": 0.03904272341431856 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4457475184349363, - "mc2_stderr": 0.015091782961916999 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.25, - "acc_stderr": 0.014843484249893985, - "acc_norm": 0.30633802816901406, - "acc_norm_stderr": 0.015801911286714723 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v3", - "model_sha": "3c27d710886ff8b6a9fcf321fae0e2f76eaeafa3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - 
"num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json b/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json deleted file mode 100644 index 2641f6d8528d3c9c0d2e1f26527b2cd392ccdfc2..0000000000000000000000000000000000000000 --- a/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3660409556313993, - "acc_stderr": 0.01407722310847014, - "acc_norm": 0.42150170648464164, - "acc_norm_stderr": 0.014430197069326021 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4064927305317666, - "acc_stderr": 0.004901747426331732, - "acc_norm": 0.5430193188607847, - "acc_norm_stderr": 0.004971278309204197 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5614035087719298, - "acc_stderr": 0.038057975055904594, - "acc_norm": 0.5614035087719298, - "acc_norm_stderr": 0.038057975055904594 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4854368932038835, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.4854368932038835, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.48020434227330777, - "acc_stderr": 0.017865944827291615, - "acc_norm": 0.48020434227330777, - "acc_norm_stderr": 0.017865944827291615 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.37446808510638296, - "acc_stderr": 0.031639106653672915, - "acc_norm": 0.37446808510638296, - "acc_norm_stderr": 0.031639106653672915 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.038284011150790206, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.038284011150790206 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.028345045864840684, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.028345045864840684 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.032100621541349864, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.032100621541349864 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.035578062450873145, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.035578062450873145 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149351, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149351 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - 
"acc_stderr": 0.032104790510157764, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.032104790510157764 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.02491524398598784, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.02491524398598784 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.0478034362693679, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.0478034362693679 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998575, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4096774193548387, - "acc_stderr": 0.027976054915347354, - "acc_norm": 0.4096774193548387, - "acc_norm_stderr": 0.027976054915347354 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6239316239316239, - "acc_stderr": 0.03173393632969481, - "acc_norm": 0.6239316239316239, - "acc_norm_stderr": 0.03173393632969481 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4641509433962264, - "acc_stderr": 0.030693675018458003, - "acc_norm": 0.4641509433962264, - "acc_norm_stderr": 0.030693675018458003 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.04724577405731572, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.04724577405731572 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5124378109452736, - "acc_stderr": 0.03534439848539579, - "acc_norm": 0.5124378109452736, - "acc_norm_stderr": 0.03534439848539579 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.03614665424180826, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.03614665424180826 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.023000086859068642, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.023000086859068642 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4190751445086705, - "acc_stderr": 0.02656417811142262, - "acc_norm": 0.4190751445086705, - "acc_norm_stderr": 0.02656417811142262 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 
0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.0277012284685426, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.0277012284685426 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46788990825688076, - "acc_stderr": 0.021393071222680814, - "acc_norm": 0.46788990825688076, - "acc_norm_stderr": 0.021393071222680814 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.02827549015679143, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.02827549015679143 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5619834710743802, - "acc_stderr": 0.04529146804435792, - "acc_norm": 0.5619834710743802, - "acc_norm_stderr": 0.04529146804435792 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.0403356566784832, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.0403356566784832 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.018521756215423027, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.018521756215423027 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509314, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509314 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2610294117647059, - "acc_stderr": 0.02667925227010311, - "acc_norm": 0.2610294117647059, - "acc_norm_stderr": 0.02667925227010311 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.45714285714285713, - "acc_stderr": 0.03189141832421397, - "acc_norm": 0.45714285714285713, - 
"acc_norm_stderr": 0.03189141832421397 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.03249822718301303, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.03249822718301303 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3011734028683181, - "acc_stderr": 0.011717148751648435, - "acc_norm": 0.3011734028683181, - "acc_norm_stderr": 0.011717148751648435 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.038881769216741004, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.038881769216741004 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.015102404797359649, - "mc2": 0.40735838259254725, - "mc2_stderr": 0.0148987552825206 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4765258215962441, - "acc_stderr": 0.017120879527725653, - "acc_norm": 0.573943661971831, - "acc_norm_stderr": 0.016951313945591816 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - 
"harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kiyoonyoo/ko-en-trans-platypus-13b", - "model_sha": "a211ce8adabfe436e59735081efe813176a88e7b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json b/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json deleted file mode 100644 index 18ee8b3239dfd04c04a559672d343473779a3486..0000000000000000000000000000000000000000 --- a/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.38822525597269625, - "acc_stderr": 0.01424161420741405, - "acc_norm": 0.4283276450511945, - "acc_norm_stderr": 0.01446049636759902 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40360485958972314, - "acc_stderr": 0.004896173035943316, - "acc_norm": 0.5388368850826528, - "acc_norm_stderr": 0.0049747064284342835 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5672514619883041, - "acc_stderr": 0.03799978644370607, - "acc_norm": 0.5672514619883041, - "acc_norm_stderr": 0.03799978644370607 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5300127713920817, - "acc_stderr": 0.01784772308664907, - "acc_norm": 0.5300127713920817, - "acc_norm_stderr": 0.01784772308664907 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4222222222222222, - "acc_stderr": 0.04266763404099582, - "acc_norm": 0.4222222222222222, - "acc_norm_stderr": 0.04266763404099582 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.030783736757745667, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.030783736757745667 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5144694533762058, - "acc_stderr": 0.02838619808417768, - "acc_norm": 0.5144694533762058, - "acc_norm_stderr": 0.02838619808417768 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.43946188340807174, - "acc_stderr": 0.03331092511038179, - "acc_norm": 0.43946188340807174, - "acc_norm_stderr": 0.03331092511038179 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5114503816793893, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.5114503816793893, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5, - "acc_stderr": 0.035623524993954825, - "acc_norm": 0.5, - 
"acc_norm_stderr": 0.035623524993954825 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419034, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419034 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.04158307533083286, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.04158307533083286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.032339434681820885, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.032339434681820885 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.44358974358974357, - "acc_stderr": 0.02518914989476419, - "acc_norm": 0.44358974358974357, - "acc_norm_stderr": 0.02518914989476419 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5092592592592593, - "acc_stderr": 0.04832853553437056, - "acc_norm": 0.5092592592592593, - "acc_norm_stderr": 0.04832853553437056 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3645320197044335, - "acc_stderr": 0.0338640574606209, - "acc_norm": 0.3645320197044335, - "acc_norm_stderr": 0.0338640574606209 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4612903225806452, - "acc_stderr": 0.028358634859836918, - "acc_norm": 0.4612903225806452, - "acc_norm_stderr": 0.028358634859836918 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6410256410256411, - "acc_stderr": 0.03142616993791924, - "acc_norm": 0.6410256410256411, - "acc_norm_stderr": 0.03142616993791924 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4, - "acc_stderr": 0.030151134457776292, - "acc_norm": 0.4, - "acc_norm_stderr": 0.030151134457776292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5181818181818182, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.5181818181818182, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.02794045713622841, - "acc_norm": 0.3, - "acc_norm_stderr": 0.02794045713622841 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5671641791044776, - "acc_stderr": 0.03503490923673282, - "acc_norm": 0.5671641791044776, - "acc_norm_stderr": 0.03503490923673282 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.035331333893236574, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.035331333893236574 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.022860838309232072, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.022860838309232072 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.4513888888888889, - "acc_stderr": 0.04161402398403279, - "acc_norm": 0.4513888888888889, - "acc_norm_stderr": 0.04161402398403279 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - 
"harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.63, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.63, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4913294797687861, - "acc_stderr": 0.026915047355369804, - "acc_norm": 0.4913294797687861, - "acc_norm_stderr": 0.026915047355369804 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.03901591825836184, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.03901591825836184 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.027513747284379414, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.027513747284379414 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.538860103626943, - "acc_stderr": 0.03597524411734578, - "acc_norm": 0.538860103626943, - "acc_norm_stderr": 0.03597524411734578 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5339449541284403, - "acc_stderr": 0.02138786335035399, - "acc_norm": 0.5339449541284403, - "acc_norm_stderr": 0.02138786335035399 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795133, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795133 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43790849673202614, - "acc_stderr": 0.02840830202033269, - "acc_norm": 0.43790849673202614, - "acc_norm_stderr": 0.02840830202033269 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.019450768432505518, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.019450768432505518 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509317, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.032664783315272714, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.032664783315272714 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - 
"acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3713235294117647, - "acc_stderr": 0.02934980313976587, - "acc_norm": 0.3713235294117647, - "acc_norm_stderr": 0.02934980313976587 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827424, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827424 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.6329113924050633, - "acc_stderr": 0.03137624072561619, - "acc_norm": 0.6329113924050633, - "acc_norm_stderr": 0.03137624072561619 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.37614080834419816, - "acc_stderr": 0.012372214430599819, - "acc_norm": 0.37614080834419816, - "acc_norm_stderr": 0.012372214430599819 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4852941176470588, - "acc_stderr": 0.03507793834791324, - "acc_norm": 0.4852941176470588, - "acc_norm_stderr": 0.03507793834791324 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5333333333333333, - "acc_stderr": 0.03895658065271847, - "acc_norm": 0.5333333333333333, - "acc_norm_stderr": 0.03895658065271847 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.4343237644069022, - "mc2_stderr": 0.015029108040608447 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4448356807511737, - "acc_stderr": 0.01703514366596629, - "acc_norm": 0.5575117370892019, - "acc_norm_stderr": 0.017026018662985032 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - 
"harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kiyoonyoo/ko-platypus-13b-control", - "model_sha": "6cdc49b0713c6d4ad656fe98f5be7eccb1d8b4ef", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v2/result_2023-10-09 19:29:45.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v2/result_2023-10-09 19:29:45.json deleted file mode 100644 index fdbeb6746561afcc4095510dbb6b6fdd93bc9c6a..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v2/result_2023-10-09 19:29:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35921501706484643, - "acc_stderr": 0.014020224155839162, - "acc_norm": 0.4206484641638225, - "acc_norm_stderr": 0.0144262112525084 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4087831109340769, - "acc_stderr": 0.004906043613013394, - "acc_norm": 0.5447122087233619, - "acc_norm_stderr": 0.004969790407117533 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4368932038834951, - "acc_stderr": 0.04911147107365777, - "acc_norm": 0.4368932038834951, - "acc_norm_stderr": 0.04911147107365777 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4367816091954023, - "acc_stderr": 0.017736470837800684, - "acc_norm": 0.4367816091954023, - "acc_norm_stderr": 0.017736470837800684 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03820169914517905, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03820169914517905 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843231008, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843231008 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.0382840111507902, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.0382840111507902 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3633440514469453, - "acc_stderr": 0.02731684767419271, - "acc_norm": 0.3633440514469453, - "acc_norm_stderr": 0.02731684767419271 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.49327354260089684, - "acc_stderr": 0.033554765962343545, - "acc_norm": 0.49327354260089684, - "acc_norm_stderr": 0.033554765962343545 - }, - "harness|ko_mmlu_human_sexuality|5": { - 
"acc": 0.3893129770992366, - "acc_stderr": 0.04276486542814591, - "acc_norm": 0.3893129770992366, - "acc_norm_stderr": 0.04276486542814591 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3787878787878788, - "acc_stderr": 0.03456088731993747, - "acc_norm": 0.3787878787878788, - "acc_norm_stderr": 0.03456088731993747 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.03724563619774633, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.03724563619774633 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.030388353551886845, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.030388353551886845 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.32564102564102565, - "acc_stderr": 0.02375966576741229, - "acc_norm": 0.32564102564102565, - "acc_norm_stderr": 0.02375966576741229 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.04766075165356461, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.04766075165356461 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.21674876847290642, - "acc_stderr": 0.02899033125251624, - "acc_norm": 0.21674876847290642, - "acc_norm_stderr": 0.02899033125251624 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2967741935483871, - "acc_stderr": 0.02598850079241187, - "acc_norm": 0.2967741935483871, - "acc_norm_stderr": 0.02598850079241187 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5982905982905983, - "acc_stderr": 0.03211693751051621, - "acc_norm": 0.5982905982905983, - "acc_norm_stderr": 0.03211693751051621 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30566037735849055, - "acc_stderr": 0.028353298073322666, - "acc_norm": 0.30566037735849055, - "acc_norm_stderr": 0.028353298073322666 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.026202766534652148, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.026202766534652148 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.19205298013245034, - "acc_stderr": 0.032162984205936135, - "acc_norm": 0.19205298013245034, - "acc_norm_stderr": 0.032162984205936135 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.46766169154228854, - "acc_stderr": 0.035281314729336065, - "acc_norm": 0.46766169154228854, - "acc_norm_stderr": 0.035281314729336065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.21693121693121692, - 
"acc_stderr": 0.021227082449445045, - "acc_norm": 0.21693121693121692, - "acc_norm_stderr": 0.021227082449445045 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.03981240543717862, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.03981240543717862 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.025906632631016124, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.025906632631016124 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.026869490744815254, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.026869490744815254 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41450777202072536, - "acc_stderr": 0.03555300319557672, - "acc_norm": 0.41450777202072536, - "acc_norm_stderr": 0.03555300319557672 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0383515395439942, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0383515395439942 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3394495412844037, - "acc_stderr": 0.02030210934266235, - "acc_norm": 0.3394495412844037, - "acc_norm_stderr": 0.02030210934266235 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392871, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392871 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.026925654653615686, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.026925654653615686 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.32894736842105265, - "acc_stderr": 0.03823428969926604, - "acc_norm": 0.32894736842105265, - "acc_norm_stderr": 0.03823428969926604 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.01911721391149517, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.01911721391149517 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.026469036818590627, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.026469036818590627 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.04493949068613539, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.04493949068613539 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.13425925925925927, - "acc_stderr": 0.02325127759054592, - "acc_norm": 
0.13425925925925927, - "acc_norm_stderr": 0.02325127759054592 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.02518778666022726, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.02518778666022726 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.028920583220675578, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.028920583220675578 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3670886075949367, - "acc_stderr": 0.031376240725616185, - "acc_norm": 0.3670886075949367, - "acc_norm_stderr": 0.031376240725616185 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27183833116036504, - "acc_stderr": 0.011363135278651414, - "acc_norm": 0.27183833116036504, - "acc_norm_stderr": 0.011363135278651414 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.033321399446680854, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.033321399446680854 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3575757575757576, - "acc_stderr": 0.037425970438065836, - "acc_norm": 0.3575757575757576, - "acc_norm_stderr": 0.037425970438065836 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476197, - "mc2": 0.42735209878041286, - "mc2_stderr": 0.014892368565155705 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.43779342723004694, - "acc_stderr": 0.017006611775152725, - "acc_norm": 0.5316901408450704, - "acc_norm_stderr": 0.01710531885082843 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - 
"harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v2", - "model_sha": "0779b43890c83a02fe7696321c95966717945f58", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v3/result_2023-10-12 23:49:38.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v3/result_2023-10-12 23:49:38.json deleted file mode 100644 index 1bf4585c7a7b6f201cd857bf4d6d90b4ec377767..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v3/result_2023-10-12 23:49:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.37627986348122866, - "acc_stderr": 0.014157022555407161, - "acc_norm": 0.44880546075085326, - "acc_norm_stderr": 0.014534599585097672 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4080860386377216, - "acc_stderr": 0.00490474775228696, - "acc_norm": 0.5417247560246963, - "acc_norm_stderr": 0.004972377085916327 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5321637426900585, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.5321637426900585, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.04721188506097172, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.04721188506097172 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.454661558109834, - "acc_stderr": 0.017806304585052606, - "acc_norm": 0.454661558109834, - "acc_norm_stderr": 0.017806304585052606 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.039725528847851375, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.039725528847851375 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3021276595744681, - "acc_stderr": 0.03001755447188055, - "acc_norm": 0.3021276595744681, - "acc_norm_stderr": 0.03001755447188055 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - 
"acc_stderr": 0.03696584317010601, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010601 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.40192926045016075, - "acc_stderr": 0.027846476005930477, - "acc_norm": 0.40192926045016075, - "acc_norm_stderr": 0.027846476005930477 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.45739910313901344, - "acc_stderr": 0.033435777055830646, - "acc_norm": 0.45739910313901344, - "acc_norm_stderr": 0.033435777055830646 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.042258754519696386, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.042258754519696386 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.033948539651564025, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.033948539651564025 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.32413793103448274, - "acc_stderr": 0.03900432069185553, - "acc_norm": 0.32413793103448274, - "acc_norm_stderr": 0.03900432069185553 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31092436974789917, - "acc_stderr": 0.03006676158297792, - "acc_norm": 0.31092436974789917, - "acc_norm_stderr": 0.03006676158297792 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35384615384615387, - "acc_stderr": 0.02424378399406217, - "acc_norm": 0.35384615384615387, - "acc_norm_stderr": 0.02424378399406217 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.030108330718011625, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.030108330718011625 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.33548387096774196, - "acc_stderr": 0.02686020644472433, - "acc_norm": 0.33548387096774196, - "acc_norm_stderr": 0.02686020644472433 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6025641025641025, - "acc_stderr": 0.03205953453789293, - "acc_norm": 0.6025641025641025, - "acc_norm_stderr": 0.03205953453789293 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33584905660377357, - "acc_stderr": 0.029067220146644826, - "acc_norm": 0.33584905660377357, - "acc_norm_stderr": 0.029067220146644826 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.025040443877000693, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.025040443877000693 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - 
"acc_stderr": 0.03479185572599661, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.03479185572599661 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4577114427860697, - "acc_stderr": 0.03522865864099597, - "acc_norm": 0.4577114427860697, - "acc_norm_stderr": 0.03522865864099597 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736412, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736412 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.04032999053960717, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.04032999053960717 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.37283236994219654, - "acc_stderr": 0.026033890613576294, - "acc_norm": 0.37283236994219654, - "acc_norm_stderr": 0.026033890613576294 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.34355828220858897, - "acc_stderr": 0.03731133519673893, - "acc_norm": 0.34355828220858897, - "acc_norm_stderr": 0.03731133519673893 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.02723741509459248, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.02723741509459248 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.44559585492227977, - "acc_stderr": 0.035870149860756595, - "acc_norm": 0.44559585492227977, - "acc_norm_stderr": 0.035870149860756595 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3761467889908257, - "acc_stderr": 0.020769231968205078, - "acc_norm": 0.3761467889908257, - "acc_norm_stderr": 0.020769231968205078 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3366013071895425, - "acc_stderr": 0.027057974624494382, - "acc_norm": 0.3366013071895425, - "acc_norm_stderr": 0.027057974624494382 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.628099173553719, - "acc_stderr": 0.044120158066245044, - "acc_norm": 0.628099173553719, - "acc_norm_stderr": 0.044120158066245044 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.03878139888797611, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.03878139888797611 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.019373332420724507, - "acc_norm": 
0.3562091503267974, - "acc_norm_stderr": 0.019373332420724507 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320186, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320186 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.33035714285714285, - "acc_stderr": 0.04464285714285713, - "acc_norm": 0.33035714285714285, - "acc_norm_stderr": 0.04464285714285713 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.1712962962962963, - "acc_stderr": 0.025695341643824705, - "acc_norm": 0.1712962962962963, - "acc_norm_stderr": 0.025695341643824705 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2770949720670391, - "acc_stderr": 0.014968772435812145, - "acc_norm": 0.2770949720670391, - "acc_norm_stderr": 0.014968772435812145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2536764705882353, - "acc_stderr": 0.02643132987078954, - "acc_norm": 0.2536764705882353, - "acc_norm_stderr": 0.02643132987078954 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142797, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142797 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.540084388185654, - "acc_stderr": 0.03244246810187913, - "acc_norm": 0.540084388185654, - "acc_norm_stderr": 0.03244246810187913 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32529335071707954, - "acc_stderr": 0.011965311536571528, - "acc_norm": 0.32529335071707954, - "acc_norm_stderr": 0.011965311536571528 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.46568627450980393, - "acc_stderr": 0.03501038327635897, - "acc_norm": 0.46568627450980393, - "acc_norm_stderr": 0.03501038327635897 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.46060606060606063, - "acc_stderr": 0.03892207016552013, - "acc_norm": 0.46060606060606063, - "acc_norm_stderr": 0.03892207016552013 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23255813953488372, - "mc1_stderr": 0.014789157531080538, - "mc2": 0.3996297603129026, - "mc2_stderr": 0.014698539951630042 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3474178403755869, - "acc_stderr": 0.016322206819108943, - "acc_norm": 0.4518779342723005, - "acc_norm_stderr": 0.017060212258103228 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - 
"harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v3", - "model_sha": "f2f7f1ba950c94dab2f672259dcf420fe22f31ca", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json deleted file mode 100644 index 807c1d56c428e168a854f80a9474ed301525667a..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.39505119453924914, - "acc_stderr": 0.014285898292938167, - "acc_norm": 0.45307167235494883, - "acc_norm_stderr": 0.014546892052005628 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40450109539932283, - "acc_stderr": 0.004897921845492103, - "acc_norm": 0.5380402310296754, - "acc_norm_stderr": 0.004975319435777095 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.40350877192982454, - "acc_stderr": 0.03762738699917056, - "acc_norm": 0.40350877192982454, - "acc_norm_stderr": 0.03762738699917056 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2912621359223301, - "acc_stderr": 0.044986763205729245, - "acc_norm": 0.2912621359223301, - "acc_norm_stderr": 0.044986763205729245 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.016857391247472552, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.016857391247472552 - }, - 
"harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2425531914893617, - "acc_stderr": 0.028020226271200217, - "acc_norm": 0.2425531914893617, - "acc_norm_stderr": 0.028020226271200217 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370518, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370518 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3762057877813505, - "acc_stderr": 0.027513925683549427, - "acc_norm": 0.3762057877813505, - "acc_norm_stderr": 0.027513925683549427 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.336322869955157, - "acc_stderr": 0.031708824268455, - "acc_norm": 0.336322869955157, - "acc_norm_stderr": 0.031708824268455 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3282442748091603, - "acc_stderr": 0.041184385658062976, - "acc_norm": 0.3282442748091603, - "acc_norm_stderr": 0.041184385658062976 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2828282828282828, - "acc_stderr": 0.03208779558786751, - "acc_norm": 0.2828282828282828, - "acc_norm_stderr": 0.03208779558786751 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2620689655172414, - "acc_stderr": 0.036646663372252565, - "acc_norm": 0.2620689655172414, - "acc_norm_stderr": 0.036646663372252565 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.03873958714149352, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.03873958714149352 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25630252100840334, - "acc_stderr": 0.028359620870533953, - "acc_norm": 0.25630252100840334, - "acc_norm_stderr": 0.028359620870533953 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.24615384615384617, - "acc_stderr": 0.021840866990423088, - "acc_norm": 0.24615384615384617, - "acc_norm_stderr": 0.021840866990423088 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04668408033024931, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04668408033024931 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3387096774193548, - "acc_stderr": 0.02692344605930286, - "acc_norm": 0.3387096774193548, - "acc_norm_stderr": 0.02692344605930286 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.03255326307272486, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.03255326307272486 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2490566037735849, - 
"acc_stderr": 0.02661648298050172, - "acc_norm": 0.2490566037735849, - "acc_norm_stderr": 0.02661648298050172 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.04309118709946458, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.04309118709946458 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.46766169154228854, - "acc_stderr": 0.035281314729336065, - "acc_norm": 0.46766169154228854, - "acc_norm_stderr": 0.035281314729336065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.034355680560478746, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.034355680560478746 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.02326651221373057, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.02326651221373057 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3901734104046243, - "acc_stderr": 0.026261677607806636, - "acc_norm": 0.3901734104046243, - "acc_norm_stderr": 0.026261677607806636 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3496932515337423, - "acc_stderr": 0.037466683254700206, - "acc_norm": 0.3496932515337423, - "acc_norm_stderr": 0.037466683254700206 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.02640614597362566, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.02640614597362566 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32124352331606215, - "acc_stderr": 0.033699508685490674, - "acc_norm": 0.32124352331606215, - "acc_norm_stderr": 0.033699508685490674 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30825688073394497, - "acc_stderr": 0.019798366698367268, - "acc_norm": 0.30825688073394497, - "acc_norm_stderr": 0.019798366698367268 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.026716118380156827, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.026716118380156827 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - 
"acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319771, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319771 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3092105263157895, - "acc_stderr": 0.03761070869867479, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.03761070869867479 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.01885008469646872, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.01885008469646872 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.026684564340461, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340461 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.19642857142857142, - "acc_stderr": 0.037709700493470194, - "acc_norm": 0.19642857142857142, - "acc_norm_stderr": 0.037709700493470194 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24537037037037038, - "acc_stderr": 0.029346665094372948, - "acc_norm": 0.24537037037037038, - "acc_norm_stderr": 0.029346665094372948 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2324022346368715, - "acc_stderr": 0.014125968754673385, - "acc_norm": 0.2324022346368715, - "acc_norm_stderr": 0.014125968754673385 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20955882352941177, - "acc_stderr": 0.02472311040767705, - "acc_norm": 0.20955882352941177, - "acc_norm_stderr": 0.02472311040767705 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2653061224489796, - "acc_stderr": 0.028263889943784617, - "acc_norm": 0.2653061224489796, - "acc_norm_stderr": 0.028263889943784617 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4472573839662447, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.4472573839662447, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.30182529335071706, - "acc_stderr": 0.01172435051810589, - "acc_norm": 0.30182529335071706, - "acc_norm_stderr": 0.01172435051810589 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.03374499356319354, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.03374499356319354 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4, - "acc_stderr": 0.03825460278380026, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03825460278380026 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3072215422276622, - "mc1_stderr": 0.016150201321323002, - "mc2": 0.47008540499028884, - "mc2_stderr": 0.015171096468571796 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5903755868544601, - "acc_stderr": 0.016857467505356098, - "acc_norm": 0.6842723004694836, - "acc_norm_stderr": 0.01593331134555564 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - 
"harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12", - "model_sha": "b70b4cab0f97d64c9540240ae4cdbec4afbf7206", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json deleted file mode 100644 index 546113410eef33fceaab115c5907bf2f01fe462c..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3779863481228669, - "acc_stderr": 0.014169664520303096, - "acc_norm": 0.4325938566552901, - "acc_norm_stderr": 0.014478005694182531 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4039036048595897, - "acc_stderr": 0.004896757857022551, - "acc_norm": 0.5393347938657638, - "acc_norm_stderr": 0.004974316807920405 - }, - "harness|ko_mmlu_world_religions|5": { - 
"acc": 0.5087719298245614, - "acc_stderr": 0.038342347441649924, - "acc_norm": 0.5087719298245614, - "acc_norm_stderr": 0.038342347441649924 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.44660194174757284, - "acc_stderr": 0.049224241534589326, - "acc_norm": 0.44660194174757284, - "acc_norm_stderr": 0.049224241534589326 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4495530012771392, - "acc_stderr": 0.017788725283507337, - "acc_norm": 0.4495530012771392, - "acc_norm_stderr": 0.017788725283507337 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3925925925925926, - "acc_stderr": 0.042185062153688786, - "acc_norm": 0.3925925925925926, - "acc_norm_stderr": 0.042185062153688786 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33617021276595743, - "acc_stderr": 0.030881618520676942, - "acc_norm": 0.33617021276595743, - "acc_norm_stderr": 0.030881618520676942 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010601, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010601 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4694533762057878, - "acc_stderr": 0.02834504586484068, - "acc_norm": 0.4694533762057878, - "acc_norm_stderr": 0.02834504586484068 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.03227790442850499, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.03227790442850499 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.04356447202665069, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.04356447202665069 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621502, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621502 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.51010101010101, - "acc_stderr": 0.035616254886737454, - "acc_norm": 0.51010101010101, - "acc_norm_stderr": 0.035616254886737454 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.03996629574876719, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.03996629574876719 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237655, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237655 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42016806722689076, - "acc_stderr": 0.03206183783236152, - "acc_norm": 0.42016806722689076, - "acc_norm_stderr": 0.03206183783236152 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4282051282051282, - "acc_stderr": 0.025088301454694834, - "acc_norm": 0.4282051282051282, - "acc_norm_stderr": 0.025088301454694834 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.33004926108374383, - "acc_stderr": 0.03308530426228258, - 
"acc_norm": 0.33004926108374383, - "acc_norm_stderr": 0.03308530426228258 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.41935483870967744, - "acc_stderr": 0.02807158890109185, - "acc_norm": 0.41935483870967744, - "acc_norm_stderr": 0.02807158890109185 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5854700854700855, - "acc_stderr": 0.03227396567623779, - "acc_norm": 0.5854700854700855, - "acc_norm_stderr": 0.03227396567623779 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.42641509433962266, - "acc_stderr": 0.03043779434298305, - "acc_norm": 0.42641509433962266, - "acc_norm_stderr": 0.03043779434298305 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.04724577405731571, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.04724577405731571 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.027840811495871927, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.027840811495871927 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5323383084577115, - "acc_stderr": 0.03528131472933607, - "acc_norm": 0.5323383084577115, - "acc_norm_stderr": 0.03528131472933607 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4393063583815029, - "acc_stderr": 0.037842719328874674, - "acc_norm": 0.4393063583815029, - "acc_norm_stderr": 0.037842719328874674 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463087, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463087 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3958333333333333, - "acc_stderr": 0.04089465449325582, - "acc_norm": 0.3958333333333333, - "acc_norm_stderr": 0.04089465449325582 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.407514450867052, - "acc_stderr": 0.026454578146931498, - "acc_norm": 0.407514450867052, - "acc_norm_stderr": 0.026454578146931498 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.02743162372241502, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.02743162372241502 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46632124352331605, - "acc_stderr": 0.036002440698671784, - "acc_norm": 0.46632124352331605, - "acc_norm_stderr": 0.036002440698671784 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4990825688073395, - "acc_stderr": 0.021437287056051215, - "acc_norm": 
0.4990825688073395, - "acc_norm_stderr": 0.021437287056051215 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43790849673202614, - "acc_stderr": 0.02840830202033269, - "acc_norm": 0.43790849673202614, - "acc_norm_stderr": 0.02840830202033269 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.46710526315789475, - "acc_stderr": 0.04060127035236395, - "acc_norm": 0.46710526315789475, - "acc_norm_stderr": 0.04060127035236395 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.0193733324207245, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.0193733324207245 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.027889139300534785, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.027889139300534785 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.04432804055291519, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.04432804055291519 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.033622774366080424, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.033622774366080424 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 0.01428834380392531, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.01428834380392531 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.45, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3492647058823529, - "acc_stderr": 0.02895975519682485, - "acc_norm": 0.3492647058823529, - "acc_norm_stderr": 0.02895975519682485 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4122448979591837, - "acc_stderr": 0.03151236044674281, - "acc_norm": 0.4122448979591837, - "acc_norm_stderr": 0.03151236044674281 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5232067510548524, - "acc_stderr": 0.032512152011410174, - "acc_norm": 0.5232067510548524, - "acc_norm_stderr": 0.032512152011410174 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3239895697522816, - "acc_stderr": 0.011952840809646563, - "acc_norm": 0.3239895697522816, - "acc_norm_stderr": 0.011952840809646563 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.46060606060606063, - "acc_stderr": 0.03892207016552013, - "acc_norm": 0.46060606060606063, - "acc_norm_stderr": 0.03892207016552013 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.01582614243950234, - 
"mc2": 0.44508082063982635, - "mc2_stderr": 0.014978253495446162 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.43779342723004694, - "acc_stderr": 0.017006611775152728, - "acc_norm": 0.528169014084507, - "acc_norm_stderr": 0.01711255703508659 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14", - "model_sha": "06b824795d8f7b9efa5cbe1c3a7b21e7c939bf8b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json deleted file mode 100644 index 
8677f771078b57ee96b9c1eb9ab9205e6a05b565..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.37627986348122866, - "acc_stderr": 0.014157022555407166, - "acc_norm": 0.4445392491467577, - "acc_norm_stderr": 0.014521226405627074 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40679147580163316, - "acc_stderr": 0.004902314055725591, - "acc_norm": 0.5413264289982075, - "acc_norm_stderr": 0.004972708369656543 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.03565079670708311, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.03565079670708311 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690879, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690879 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2681992337164751, - "acc_stderr": 0.01584243083526942, - "acc_norm": 0.2681992337164751, - "acc_norm_stderr": 0.01584243083526942 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.026355158413349417, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.026355158413349417 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.22289156626506024, - "acc_stderr": 0.032400048255946876, - "acc_norm": 0.22289156626506024, - "acc_norm_stderr": 0.032400048255946876 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33440514469453375, - "acc_stderr": 0.02679542232789394, - "acc_norm": 0.33440514469453375, - "acc_norm_stderr": 0.02679542232789394 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2825112107623318, - "acc_stderr": 0.030216831011508755, - "acc_norm": 0.2825112107623318, - "acc_norm_stderr": 0.030216831011508755 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596917, - "acc_norm": 0.2366412213740458, - "acc_norm_stderr": 0.03727673575596917 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.030532892233932026, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.030532892233932026 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.03618664819936246, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.03618664819936246 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24369747899159663, - "acc_stderr": 0.027886828078380558, - "acc_norm": 0.24369747899159663, - "acc_norm_stderr": 0.027886828078380558 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2205128205128205, - "acc_stderr": 0.02102067268082791, - "acc_norm": 0.2205128205128205, - "acc_norm_stderr": 0.02102067268082791 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04643454608906275, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04643454608906275 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3548387096774194, - "acc_stderr": 0.027218889773308753, - "acc_norm": 0.3548387096774194, - "acc_norm_stderr": 0.027218889773308753 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.030882736974138653, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.030882736974138653 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2, - "acc_stderr": 0.02461829819586651, - "acc_norm": 0.2, - "acc_norm_stderr": 0.02461829819586651 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.34328358208955223, - "acc_stderr": 0.03357379665433431, - "acc_norm": 0.34328358208955223, - "acc_norm_stderr": 0.03357379665433431 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24867724867724866, - "acc_stderr": 0.022261817692400175, - "acc_norm": 0.24867724867724866, - "acc_norm_stderr": 0.022261817692400175 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03899073687357335, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03899073687357335 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.13, - "acc_stderr": 0.0337997668989631, - "acc_norm": 0.13, - "acc_norm_stderr": 0.0337997668989631 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36127167630057805, - "acc_stderr": 0.025862201852277895, - "acc_norm": 0.36127167630057805, - "acc_norm_stderr": 0.025862201852277895 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2932098765432099, - "acc_stderr": 0.025329888171900933, - "acc_norm": 0.2932098765432099, - "acc_norm_stderr": 0.025329888171900933 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - 
"acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3005181347150259, - "acc_stderr": 0.033088185944157515, - "acc_norm": 0.3005181347150259, - "acc_norm_stderr": 0.033088185944157515 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25688073394495414, - "acc_stderr": 0.01873249292834245, - "acc_norm": 0.25688073394495414, - "acc_norm_stderr": 0.01873249292834245 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.033954900208561116, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.033954900208561116 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.02625605383571896, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.02625605383571896 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.47107438016528924, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.47107438016528924, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.03715062154998905, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.03715062154998905 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.01892608291608339, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.01892608291608339 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.28368794326241137, - "acc_stderr": 0.026891709428343957, - "acc_norm": 0.28368794326241137, - "acc_norm_stderr": 0.026891709428343957 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755807, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755807 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.24537037037037038, - "acc_stderr": 0.029346665094372948, - "acc_norm": 0.24537037037037038, - "acc_norm_stderr": 0.029346665094372948 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225606, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225606 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.19117647058823528, - "acc_stderr": 0.02388688192244036, - "acc_norm": 0.19117647058823528, - "acc_norm_stderr": 0.02388688192244036 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4816326530612245, - "acc_stderr": 0.031987615467631264, - "acc_norm": 0.4816326530612245, - "acc_norm_stderr": 0.031987615467631264 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.031052391937584353, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.031052391937584353 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2790091264667536, - "acc_stderr": 
0.011455208832803545, - "acc_norm": 0.2790091264667536, - "acc_norm_stderr": 0.011455208832803545 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.032566854844603886, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.032566854844603886 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3090909090909091, - "acc_stderr": 0.03608541011573967, - "acc_norm": 0.3090909090909091, - "acc_norm_stderr": 0.03608541011573967 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2876376988984088, - "mc1_stderr": 0.015846315101394816, - "mc2": 0.44907946334045823, - "mc2_stderr": 0.015040408260408762 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2992957746478873, - "acc_stderr": 0.01569830927620496, - "acc_norm": 0.4354460093896714, - "acc_norm_stderr": 0.01699632856934095 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - 
"config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2", - "model_sha": "116dea6c97133d0729b618bbe76cf650a92a90a8", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json deleted file mode 100644 index 3f278dfb425775dc6e6090629a0a817fc2a2cbad..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.38822525597269625, - "acc_stderr": 0.014241614207414046, - "acc_norm": 0.4513651877133106, - "acc_norm_stderr": 0.014542104569955262 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40689105755825533, - "acc_stderr": 0.004902502514738606, - "acc_norm": 0.5412268472415853, - "acc_norm_stderr": 0.004972790690640187 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30409356725146197, - "acc_stderr": 0.0352821125824523, - "acc_norm": 0.30409356725146197, - "acc_norm_stderr": 0.0352821125824523 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.36893203883495146, - "acc_stderr": 0.04777615181156739, - "acc_norm": 0.36893203883495146, - "acc_norm_stderr": 0.04777615181156739 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2822477650063857, - "acc_stderr": 0.016095302969878548, - "acc_norm": 0.2822477650063857, - "acc_norm_stderr": 0.016095302969878548 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.0281854413012341, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.0281854413012341 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3247588424437299, - "acc_stderr": 0.026596782287697043, - "acc_norm": 0.3247588424437299, - "acc_norm_stderr": 0.026596782287697043 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.25112107623318386, - "acc_stderr": 0.029105220833224622, - "acc_norm": 0.25112107623318386, - "acc_norm_stderr": 0.029105220833224622 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.32061068702290074, - "acc_stderr": 0.040933292298342784, - "acc_norm": 0.32061068702290074, - "acc_norm_stderr": 0.040933292298342784 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.03274287914026866, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.03274287914026866 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 
0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.03135709599613591, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.03135709599613591 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.33076923076923076, - "acc_stderr": 0.023854795680971142, - "acc_norm": 0.33076923076923076, - "acc_norm_stderr": 0.023854795680971142 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2512315270935961, - "acc_stderr": 0.030516530732694433, - "acc_norm": 0.2512315270935961, - "acc_norm_stderr": 0.030516530732694433 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.31290322580645163, - "acc_stderr": 0.02637756702864586, - "acc_norm": 0.31290322580645163, - "acc_norm_stderr": 0.02637756702864586 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.33760683760683763, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.33760683760683763, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.04265792110940588, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.04265792110940588 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.025348097468097856, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.025348097468097856 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.03802039760107903, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.03802039760107903 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.39303482587064675, - "acc_stderr": 0.0345368246603156, - "acc_norm": 0.39303482587064675, - "acc_norm_stderr": 0.0345368246603156 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.22486772486772486, - "acc_stderr": 0.02150209607822914, - "acc_norm": 0.22486772486772486, - "acc_norm_stderr": 0.02150209607822914 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3541666666666667, - "acc_stderr": 0.039994111357535424, - "acc_norm": 0.3541666666666667, - "acc_norm_stderr": 0.039994111357535424 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.45, - "acc_stderr": 0.04999999999999999, - "acc_norm": 0.45, - "acc_norm_stderr": 0.04999999999999999 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3236994219653179, - "acc_stderr": 
0.025190181327608415, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.025190181327608415 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.31790123456790126, - "acc_stderr": 0.025910063528240865, - "acc_norm": 0.31790123456790126, - "acc_norm_stderr": 0.025910063528240865 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.39896373056994816, - "acc_stderr": 0.03533999094065696, - "acc_norm": 0.39896373056994816, - "acc_norm_stderr": 0.03533999094065696 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29357798165137616, - "acc_stderr": 0.01952515112263966, - "acc_norm": 0.29357798165137616, - "acc_norm_stderr": 0.01952515112263966 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.04360314860077459, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.04360314860077459 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.026992544339297226, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.026992544339297226 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.045641987674327526, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.045641987674327526 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2679738562091503, - "acc_stderr": 0.017917974069594726, - "acc_norm": 0.2679738562091503, - "acc_norm_stderr": 0.017917974069594726 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.026684564340460994, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460994 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.15178571428571427, - "acc_stderr": 0.03405702838185695, - "acc_norm": 0.15178571428571427, - "acc_norm_stderr": 0.03405702838185695 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26145251396648045, - "acc_stderr": 0.014696599650364545, - "acc_norm": 0.26145251396648045, - "acc_norm_stderr": 0.014696599650364545 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.23161764705882354, - "acc_stderr": 
0.025626533803777562, - "acc_norm": 0.23161764705882354, - "acc_norm_stderr": 0.025626533803777562 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827423, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827423 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2869198312236287, - "acc_stderr": 0.02944377302259469, - "acc_norm": 0.2869198312236287, - "acc_norm_stderr": 0.02944377302259469 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24902216427640156, - "acc_stderr": 0.01104489226404077, - "acc_norm": 0.24902216427640156, - "acc_norm_stderr": 0.01104489226404077 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.030190282453501933, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.030190282453501933 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624335, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624335 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29253365973072215, - "mc1_stderr": 0.015925597445286165, - "mc2": 0.4600456246073735, - "mc2_stderr": 0.014958372484169768 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3051643192488263, - "acc_stderr": 0.0157849478907378, - "acc_norm": 0.4284037558685446, - "acc_norm_stderr": 0.01696315139010863 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - 
"harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8", - "model_sha": "424602efb3cb7b2c4e901d325113335c002a1da2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json deleted file mode 100644 index bf6a2fbda0cb3ecf4af976488ae0667e6498104c..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3993174061433447, - "acc_stderr": 0.014312094557946704, - "acc_norm": 0.46928327645051193, - "acc_norm_stderr": 0.014583792546304038 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4056960764787891, - "acc_stderr": 0.004900227226433389, - "acc_norm": 0.5419239195379406, - "acc_norm_stderr": 0.00497221024402057 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.32748538011695905, - "acc_stderr": 0.035993357714560276, - "acc_norm": 0.32748538011695905, - "acc_norm_stderr": 0.035993357714560276 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2822477650063857, - "acc_stderr": 0.016095302969878534, - "acc_norm": 0.2822477650063857, - "acc_norm_stderr": 0.016095302969878534 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996794, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996794 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.225531914893617, - "acc_stderr": 0.027321078417387533, - "acc_norm": 0.225531914893617, - "acc_norm_stderr": 0.027321078417387533 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.26506024096385544, - "acc_stderr": 0.03436024037944968, - "acc_norm": 0.26506024096385544, - "acc_norm_stderr": 0.03436024037944968 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3762057877813505, - "acc_stderr": 0.027513925683549427, - "acc_norm": 0.3762057877813505, - "acc_norm_stderr": 0.027513925683549427 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2556053811659193, - "acc_stderr": 0.029275891003969923, - "acc_norm": 0.2556053811659193, - "acc_norm_stderr": 0.029275891003969923 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.32061068702290074, - "acc_stderr": 0.040933292298342784, - "acc_norm": 0.32061068702290074, - "acc_norm_stderr": 0.040933292298342784 - }, - 
"harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29292929292929293, - "acc_stderr": 0.032424979581788166, - "acc_norm": 0.29292929292929293, - "acc_norm_stderr": 0.032424979581788166 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2689655172413793, - "acc_stderr": 0.036951833116502325, - "acc_norm": 0.2689655172413793, - "acc_norm_stderr": 0.036951833116502325 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.039505818611799616, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.039505818611799616 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.029597329730978082, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.029597329730978082 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.29743589743589743, - "acc_stderr": 0.023177408131465942, - "acc_norm": 0.29743589743589743, - "acc_norm_stderr": 0.023177408131465942 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.03178529710642749, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.03178529710642749 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3548387096774194, - "acc_stderr": 0.027218889773308757, - "acc_norm": 0.3548387096774194, - "acc_norm_stderr": 0.027218889773308757 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03088273697413865, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03088273697413865 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 0.026880647889051996, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889051996 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085622, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085622 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969654, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969654 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.417910447761194, - "acc_stderr": 0.034875586404620636, - "acc_norm": 0.417910447761194, - "acc_norm_stderr": 0.034875586404620636 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.021679219663693152, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.021679219663693152 - }, 
- "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.407514450867052, - "acc_stderr": 0.0264545781469315, - "acc_norm": 0.407514450867052, - "acc_norm_stderr": 0.0264545781469315 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.02640614597362566, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.02640614597362566 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38341968911917096, - "acc_stderr": 0.03508984236295341, - "acc_norm": 0.38341968911917096, - "acc_norm_stderr": 0.03508984236295341 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3192660550458716, - "acc_stderr": 0.01998782906975, - "acc_norm": 0.3192660550458716, - "acc_norm_stderr": 0.01998782906975 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.03764950879790605, - "acc_norm": 0.23015873015873015, - "acc_norm_stderr": 0.03764950879790605 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.31699346405228757, - "acc_stderr": 0.026643278474508748, - "acc_norm": 0.31699346405228757, - "acc_norm_stderr": 0.026643278474508748 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5371900826446281, - "acc_stderr": 0.04551711196104218, - "acc_norm": 0.5371900826446281, - "acc_norm_stderr": 0.04551711196104218 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3092105263157895, - "acc_stderr": 0.03761070869867479, - "acc_norm": 0.3092105263157895, - "acc_norm_stderr": 0.03761070869867479 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.018875682938069443, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.018875682938069443 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.02788913930053478, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.02788913930053478 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044793, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044793 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.030998666304560534, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.030998666304560534 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 
0.24581005586592178, - "acc_stderr": 0.014400296429225608, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.02439819298665492, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.02439819298665492 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4204081632653061, - "acc_stderr": 0.03160106993449604, - "acc_norm": 0.4204081632653061, - "acc_norm_stderr": 0.03160106993449604 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.38396624472573837, - "acc_stderr": 0.03165867806410668, - "acc_norm": 0.38396624472573837, - "acc_norm_stderr": 0.03165867806410668 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.30247718383311606, - "acc_stderr": 0.011731524234165706, - "acc_norm": 0.30247718383311606, - "acc_norm_stderr": 0.011731524234165706 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3382352941176471, - "acc_stderr": 0.03320574612945432, - "acc_norm": 0.3382352941176471, - "acc_norm_stderr": 0.03320574612945432 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.40606060606060607, - "acc_stderr": 0.03834816355401181, - "acc_norm": 0.40606060606060607, - "acc_norm_stderr": 0.03834816355401181 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3292533659730722, - "mc1_stderr": 0.016451264440068246, - "mc2": 0.4905950778856991, - "mc2_stderr": 0.01526052031524314 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2300469483568075, - "acc_stderr": 0.014426985010207649, - "acc_norm": 0.3814553990610329, - "acc_norm_stderr": 0.01665108789451208 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1", - "model_sha": "f0e5e0f218635b4dd43f0ba2b3b4cd5007967625", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json deleted file mode 100644 index 2aa581b5805256d8e527eae566b18c2b0a3f3b3c..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35409556313993173, - "acc_stderr": 0.013975454122756557, - "acc_norm": 0.3993174061433447, - "acc_norm_stderr": 0.014312094557946705 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4021111332403904, - "acc_stderr": 0.004893220635011784, - "acc_norm": 0.536247759410476, - "acc_norm_stderr": 0.00497665198975764 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.037867207062342145, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.037867207062342145 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.45721583652618136, - "acc_stderr": 0.01781438523853443, - "acc_norm": 0.45721583652618136, - "acc_norm_stderr": 0.01781438523853443 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843231004, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843231004 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010601, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 
0.03696584317010601 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4565916398713826, - "acc_stderr": 0.0282908690541976, - "acc_norm": 0.4565916398713826, - "acc_norm_stderr": 0.0282908690541976 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34080717488789236, - "acc_stderr": 0.03181149747055358, - "acc_norm": 0.34080717488789236, - "acc_norm_stderr": 0.03181149747055358 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.042607351576445594, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.042607351576445594 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.03560716516531061, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.03560716516531061 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.039417076320648906, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.039417076320648906 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.041583075330832865, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.041583075330832865 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.0322529423239964, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.0322529423239964 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.40512820512820513, - "acc_stderr": 0.024890471769938145, - "acc_norm": 0.40512820512820513, - "acc_norm_stderr": 0.024890471769938145 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.045879047413018105, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.045879047413018105 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051448, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051448 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4258064516129032, - "acc_stderr": 0.0281291127091659, - "acc_norm": 0.4258064516129032, - "acc_norm_stderr": 0.0281291127091659 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5982905982905983, - "acc_stderr": 0.03211693751051621, - "acc_norm": 0.5982905982905983, - "acc_norm_stderr": 0.03211693751051621 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.41132075471698115, - "acc_stderr": 0.0302850092590098, - "acc_norm": 0.41132075471698115, - "acc_norm_stderr": 0.0302850092590098 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4818181818181818, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.4818181818181818, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.02803792996911499, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.02803792996911499 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - 
"harness|ko_mmlu_sociology|5": { - "acc": 0.5373134328358209, - "acc_stderr": 0.03525675167467974, - "acc_norm": 0.5373134328358209, - "acc_norm_stderr": 0.03525675167467974 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.03714325906302065, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302065 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.335978835978836, - "acc_stderr": 0.024326310529149152, - "acc_norm": 0.335978835978836, - "acc_norm_stderr": 0.024326310529149152 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.026296227915613663, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.026296227915613663 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.038566721635489125, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.038566721635489125 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.43209876543209874, - "acc_stderr": 0.02756301097160667, - "acc_norm": 0.43209876543209874, - "acc_norm_stderr": 0.02756301097160667 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49222797927461137, - "acc_stderr": 0.03608003225569654, - "acc_norm": 0.49222797927461137, - "acc_norm_stderr": 0.03608003225569654 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47889908256880737, - "acc_stderr": 0.021418224754264643, - "acc_norm": 0.47889908256880737, - "acc_norm_stderr": 0.021418224754264643 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.4126984126984127, - "acc_stderr": 0.04403438954768177, - "acc_norm": 0.4126984126984127, - "acc_norm_stderr": 0.04403438954768177 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3954248366013072, - "acc_stderr": 0.027996723180631466, - "acc_norm": 0.3954248366013072, - "acc_norm_stderr": 0.027996723180631466 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5289256198347108, - "acc_stderr": 0.04556710331269498, - "acc_norm": 0.5289256198347108, - "acc_norm_stderr": 0.04556710331269498 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4473684210526316, - "acc_stderr": 0.04046336883978251, - "acc_norm": 0.4473684210526316, - "acc_norm_stderr": 0.04046336883978251 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3415032679738562, - "acc_stderr": 0.019184639328092484, - "acc_norm": 0.3415032679738562, - "acc_norm_stderr": 0.019184639328092484 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 
0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4305555555555556, - "acc_stderr": 0.03376922151252336, - "acc_norm": 0.4305555555555556, - "acc_norm_stderr": 0.03376922151252336 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.28044692737430166, - "acc_stderr": 0.015024083883322869, - "acc_norm": 0.28044692737430166, - "acc_norm_stderr": 0.015024083883322869 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.45, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3860294117647059, - "acc_stderr": 0.02957326913441112, - "acc_norm": 0.3860294117647059, - "acc_norm_stderr": 0.02957326913441112 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4448979591836735, - "acc_stderr": 0.031814251181977865, - "acc_norm": 0.4448979591836735, - "acc_norm_stderr": 0.031814251181977865 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5189873417721519, - "acc_stderr": 0.03252375148090447, - "acc_norm": 0.5189873417721519, - "acc_norm_stderr": 0.03252375148090447 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3428943937418514, - "acc_stderr": 0.012123463271585895, - "acc_norm": 0.3428943937418514, - "acc_norm_stderr": 0.012123463271585895 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.03484941514429231, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.03484941514429231 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.46060606060606063, - "acc_stderr": 0.03892207016552013, - "acc_norm": 0.46060606060606063, - "acc_norm_stderr": 0.03892207016552013 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26560587515299877, - "mc1_stderr": 0.015461027627253597, - "mc2": 0.4170988801266876, - "mc2_stderr": 0.015242823678966766 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.460093896713615, - "acc_stderr": 0.017085101945400166, - "acc_norm": 0.6115023474178404, - "acc_norm_stderr": 0.01670815454631332 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - 
"harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1", - "model_sha": "37db0cf6282e151ecc013b98fda871ce486e52c3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2/result_2023-10-25 12:23:22.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2/result_2023-10-25 12:23:22.json deleted file mode 100644 index 12dafedd479af014fd8fc665876d98c990090614..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2/result_2023-10-25 12:23:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36689419795221845, - "acc_stderr": 0.014084133118104292, - "acc_norm": 0.4249146757679181, - "acc_norm_stderr": 0.014445698968520769 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4053973312089225, - "acc_stderr": 0.004899653704032835, - "acc_norm": 0.5408285202150966, - "acc_norm_stderr": 0.004973117975062489 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5497076023391813, - "acc_stderr": 0.03815827365913236, - "acc_norm": 0.5497076023391813, - "acc_norm_stderr": 0.03815827365913236 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.42718446601941745, - "acc_stderr": 0.04897957737781168, - "acc_norm": 0.42718446601941745, - "acc_norm_stderr": 0.04897957737781168 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49169859514687103, - "acc_stderr": 0.017877498991072008, - "acc_norm": 0.49169859514687103, - "acc_norm_stderr": 0.017877498991072008 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742399, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742399 - }, - 
"harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3148936170212766, - "acc_stderr": 0.030363582197238167, - "acc_norm": 0.3148936170212766, - "acc_norm_stderr": 0.030363582197238167 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4036144578313253, - "acc_stderr": 0.03819486140758398, - "acc_norm": 0.4036144578313253, - "acc_norm_stderr": 0.03819486140758398 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4340836012861736, - "acc_stderr": 0.0281502322445356, - "acc_norm": 0.4340836012861736, - "acc_norm_stderr": 0.0281502322445356 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4170403587443946, - "acc_stderr": 0.03309266936071721, - "acc_norm": 0.4170403587443946, - "acc_norm_stderr": 0.03309266936071721 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.41414141414141414, - "acc_stderr": 0.03509438348879629, - "acc_norm": 0.41414141414141414, - "acc_norm_stderr": 0.03509438348879629 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.35172413793103446, - "acc_stderr": 0.0397923663749741, - "acc_norm": 0.35172413793103446, - "acc_norm_stderr": 0.0397923663749741 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416545, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416545 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3907563025210084, - "acc_stderr": 0.031693802357129965, - "acc_norm": 0.3907563025210084, - "acc_norm_stderr": 0.031693802357129965 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.37948717948717947, - "acc_stderr": 0.024603626924097417, - "acc_norm": 0.37948717948717947, - "acc_norm_stderr": 0.024603626924097417 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680814, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.04616631111801713, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.04616631111801713 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3709677419354839, - "acc_stderr": 0.027480541887953593, - "acc_norm": 0.3709677419354839, - "acc_norm_stderr": 0.027480541887953593 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6367521367521367, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.6367521367521367, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.38113207547169814, - "acc_stderr": 0.029890609686286616, - "acc_norm": 0.38113207547169814, - "acc_norm_stderr": 0.029890609686286616 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 
0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.026593939101844065, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.026593939101844065 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2119205298013245, - "acc_stderr": 0.03336767086567978, - "acc_norm": 0.2119205298013245, - "acc_norm_stderr": 0.03336767086567978 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48258706467661694, - "acc_stderr": 0.03533389234739245, - "acc_norm": 0.48258706467661694, - "acc_norm_stderr": 0.03533389234739245 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047873, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047873 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918417, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918417 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.04032999053960717, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.04032999053960717 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.45664739884393063, - "acc_stderr": 0.026817718130348923, - "acc_norm": 0.45664739884393063, - "acc_norm_stderr": 0.026817718130348923 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.027607914087400463, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.027607914087400463 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43005181347150256, - "acc_stderr": 0.03572954333144808, - "acc_norm": 0.43005181347150256, - "acc_norm_stderr": 0.03572954333144808 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512321984, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512321984 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.41467889908256883, - "acc_stderr": 0.021122903208602602, - "acc_norm": 0.41467889908256883, - "acc_norm_stderr": 0.021122903208602602 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.038932596106046734, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.038932596106046734 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35947712418300654, - "acc_stderr": 0.027475969910660952, - "acc_norm": 0.35947712418300654, - "acc_norm_stderr": 0.027475969910660952 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.628099173553719, - "acc_stderr": 
0.04412015806624504, - "acc_norm": 0.628099173553719, - "acc_norm_stderr": 0.04412015806624504 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.35526315789473684, - "acc_stderr": 0.038947344870133176, - "acc_norm": 0.35526315789473684, - "acc_norm_stderr": 0.038947344870133176 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3284313725490196, - "acc_stderr": 0.01899970738316267, - "acc_norm": 0.3284313725490196, - "acc_norm_stderr": 0.01899970738316267 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952686, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952686 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.027467401804058014, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.027467401804058014 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.02518778666022729, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.02518778666022729 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4489795918367347, - "acc_stderr": 0.03184213866687579, - "acc_norm": 0.4489795918367347, - "acc_norm_stderr": 0.03184213866687579 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.30378096479791394, - "acc_stderr": 0.01174578772047248, - "acc_norm": 0.30378096479791394, - "acc_norm_stderr": 0.01174578772047248 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.03471157907953425, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.03471157907953425 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.038956580652718446, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.038956580652718446 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715008, - "mc2": 0.40911872695606455, - "mc2_stderr": 0.014770266015676235 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.41431924882629106, - "acc_stderr": 0.01688625139998468, - "acc_norm": 0.5164319248826291, - "acc_norm_stderr": 0.017130520993936017 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - 
"harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2", - "model_sha": "a0128d1e42741ce3dc112beead1aea568e0ceaa9", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json deleted file mode 100644 index 8a992d65d83c521c3c27f022833cc9b74ca304ee..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3720136518771331, - "acc_stderr": 0.014124597881844466, - "acc_norm": 0.4402730375426621, - "acc_norm_stderr": 0.014506769524804243 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40938060147381, - "acc_stderr": 0.004907146229347555, - "acc_norm": 0.5426209918342959, - "acc_norm_stderr": 0.004971619995879755 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.49707602339181284, - "acc_stderr": 0.03834759370936839, - "acc_norm": 0.49707602339181284, - "acc_norm_stderr": 
0.03834759370936839 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.047504583990416925, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.047504583990416925 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4891443167305236, - "acc_stderr": 0.017875748840242418, - "acc_norm": 0.4891443167305236, - "acc_norm_stderr": 0.017875748840242418 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.34893617021276596, - "acc_stderr": 0.031158522131357783, - "acc_norm": 0.34893617021276596, - "acc_norm_stderr": 0.031158522131357783 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.40836012861736337, - "acc_stderr": 0.027917050748484634, - "acc_norm": 0.40836012861736337, - "acc_norm_stderr": 0.027917050748484634 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4170403587443946, - "acc_stderr": 0.03309266936071721, - "acc_norm": 0.4170403587443946, - "acc_norm_stderr": 0.03309266936071721 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.0497569851956243, - "acc_norm": 0.43, - "acc_norm_stderr": 0.0497569851956243 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4292929292929293, - "acc_stderr": 0.035265527246011986, - "acc_norm": 0.4292929292929293, - "acc_norm_stderr": 0.035265527246011986 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.22758620689655173, - "acc_stderr": 0.03493950380131184, - "acc_norm": 0.22758620689655173, - "acc_norm_stderr": 0.03493950380131184 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31092436974789917, - "acc_stderr": 0.030066761582977924, - "acc_norm": 0.31092436974789917, - "acc_norm_stderr": 0.030066761582977924 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, - "acc_stderr": 0.024283140529467284, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.024283140529467284 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 
0.3774193548387097, - "acc_stderr": 0.027575960723278243, - "acc_norm": 0.3774193548387097, - "acc_norm_stderr": 0.027575960723278243 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5982905982905983, - "acc_stderr": 0.03211693751051622, - "acc_norm": 0.5982905982905983, - "acc_norm_stderr": 0.03211693751051622 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3433962264150943, - "acc_stderr": 0.029224526469124792, - "acc_norm": 0.3433962264150943, - "acc_norm_stderr": 0.029224526469124792 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.03297986648473836, - "acc_norm": 0.2052980132450331, - "acc_norm_stderr": 0.03297986648473836 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48258706467661694, - "acc_stderr": 0.03533389234739244, - "acc_norm": 0.48258706467661694, - "acc_norm_stderr": 0.03533389234739244 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736411, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736411 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.02141168439369419, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.02141168439369419 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.375, - "acc_stderr": 0.04048439222695598, - "acc_norm": 0.375, - "acc_norm_stderr": 0.04048439222695598 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.026424816594009845, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.026424816594009845 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44785276073619634, - "acc_stderr": 0.03906947479456601, - "acc_norm": 0.44785276073619634, - "acc_norm_stderr": 0.03906947479456601 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4104938271604938, - "acc_stderr": 0.027371350925124764, - "acc_norm": 0.4104938271604938, - "acc_norm_stderr": 0.027371350925124764 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41968911917098445, - "acc_stderr": 0.03561587327685883, - "acc_norm": 0.41968911917098445, - "acc_norm_stderr": 0.03561587327685883 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3669724770642202, - "acc_stderr": 0.020664675659520532, - "acc_norm": 0.3669724770642202, - "acc_norm_stderr": 0.020664675659520532 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23015873015873015, - "acc_stderr": 0.037649508797906066, - "acc_norm": 
0.23015873015873015, - "acc_norm_stderr": 0.037649508797906066 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.02742047766262923, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.02742047766262923 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6115702479338843, - "acc_stderr": 0.04449270350068383, - "acc_norm": 0.6115702479338843, - "acc_norm_stderr": 0.04449270350068383 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336285, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336285 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.019270998708223977, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.019270998708223977 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307854, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307854 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093936, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093936 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476786, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476786 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.23161764705882354, - "acc_stderr": 0.025626533803777565, - "acc_norm": 0.23161764705882354, - "acc_norm_stderr": 0.025626533803777565 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.31020408163265306, - "acc_stderr": 0.029613459872484385, - "acc_norm": 0.31020408163265306, - "acc_norm_stderr": 0.029613459872484385 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.46835443037974683, - "acc_stderr": 0.03248197400511075, - "acc_norm": 0.46835443037974683, - "acc_norm_stderr": 0.03248197400511075 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28226857887874834, - "acc_stderr": 0.011495852176241954, - "acc_norm": 0.28226857887874834, - "acc_norm_stderr": 0.011495852176241954 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.03898531605579419, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.03898531605579419 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29498164014687883, - "mc1_stderr": 0.015964400965589678, - "mc2": 0.4528465622549083, - "mc2_stderr": 0.015125783674090152 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5774647887323944, - "acc_stderr": 0.016932825271884965, - "acc_norm": 
0.6830985915492958, - "acc_norm_stderr": 0.015949203508790578 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4", - "model_sha": "07d2a890b5efda243a02747ddb39e4bd3760235f", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v5/result_2023-10-15 04:23:03.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v5/result_2023-10-15 04:23:03.json deleted file mode 100644 index 2b196ac94461db5147d6217320296b27b0fdff49..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v5/result_2023-10-15 04:23:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 
0.378839590443686, - "acc_stderr": 0.014175915490000326, - "acc_norm": 0.4377133105802048, - "acc_norm_stderr": 0.014497573881108287 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4102768372834097, - "acc_stderr": 0.004908786109095825, - "acc_norm": 0.5422226648078072, - "acc_norm_stderr": 0.004971958480920486 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3742690058479532, - "acc_stderr": 0.03711601185389481, - "acc_norm": 0.3742690058479532, - "acc_norm_stderr": 0.03711601185389481 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3592233009708738, - "acc_stderr": 0.04750458399041692, - "acc_norm": 0.3592233009708738, - "acc_norm_stderr": 0.04750458399041692 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.351213282247765, - "acc_stderr": 0.017069982051499427, - "acc_norm": 0.351213282247765, - "acc_norm_stderr": 0.017069982051499427 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2, - "acc_stderr": 0.0261488180184245, - "acc_norm": 0.2, - "acc_norm_stderr": 0.0261488180184245 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.43729903536977494, - "acc_stderr": 0.02817391776176289, - "acc_norm": 0.43729903536977494, - "acc_norm_stderr": 0.02817391776176289 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.33183856502242154, - "acc_stderr": 0.03160295143776679, - "acc_norm": 0.33183856502242154, - "acc_norm_stderr": 0.03160295143776679 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.35877862595419846, - "acc_stderr": 0.04206739313864908, - "acc_norm": 0.35877862595419846, - "acc_norm_stderr": 0.04206739313864908 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.04605661864718381, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04605661864718381 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.033184773338453315, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.033184773338453315 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.03921545312467122, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.03921545312467122 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2773109243697479, - "acc_stderr": 0.02907937453948001, - "acc_norm": 0.2773109243697479, - "acc_norm_stderr": 0.02907937453948001 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36923076923076925, - "acc_stderr": 0.02446861524147892, - "acc_norm": 0.36923076923076925, - "acc_norm_stderr": 0.02446861524147892 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 
0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04750077341199984, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04750077341199984 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36774193548387096, - "acc_stderr": 0.027430866579973474, - "acc_norm": 0.36774193548387096, - "acc_norm_stderr": 0.027430866579973474 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49572649572649574, - "acc_stderr": 0.032754892643821316, - "acc_norm": 0.49572649572649574, - "acc_norm_stderr": 0.032754892643821316 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.33636363636363636, - "acc_stderr": 0.04525393596302505, - "acc_norm": 0.33636363636363636, - "acc_norm_stderr": 0.04525393596302505 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073835, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073835 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48756218905472637, - "acc_stderr": 0.03534439848539579, - "acc_norm": 0.48756218905472637, - "acc_norm_stderr": 0.03534439848539579 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.28901734104046245, - "acc_stderr": 0.034564257450869995, - "acc_norm": 0.28901734104046245, - "acc_norm_stderr": 0.034564257450869995 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776575, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.375, - "acc_stderr": 0.04048439222695598, - "acc_norm": 0.375, - "acc_norm_stderr": 0.04048439222695598 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43352601156069365, - "acc_stderr": 0.026680134761679214, - "acc_norm": 0.43352601156069365, - "acc_norm_stderr": 0.026680134761679214 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3487654320987654, - "acc_stderr": 0.02651759772446501, - "acc_norm": 0.3487654320987654, - "acc_norm_stderr": 0.02651759772446501 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.03541508578884021, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.03541508578884021 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3321100917431193, - "acc_stderr": 0.020192682985423357, - "acc_norm": 0.3321100917431193, - "acc_norm_stderr": 0.020192682985423357 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.037184890068181146, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.037184890068181146 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35947712418300654, - "acc_stderr": 0.027475969910660952, - "acc_norm": 0.35947712418300654, - "acc_norm_stderr": 0.027475969910660952 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5867768595041323, - "acc_stderr": 0.04495087843548408, - "acc_norm": 0.5867768595041323, - "acc_norm_stderr": 0.04495087843548408 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.038781398887976104, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.038781398887976104 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.33986928104575165, - "acc_stderr": 0.01916241858862355, - "acc_norm": 0.33986928104575165, - "acc_norm_stderr": 0.01916241858862355 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.28368794326241137, - "acc_stderr": 0.026891709428343957, - "acc_norm": 0.28368794326241137, - "acc_norm_stderr": 0.026891709428343957 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.027920963147993666, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.027920963147993666 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098424, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098424 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909281, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909281 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.21691176470588236, - "acc_stderr": 0.02503584522771125, - "acc_norm": 0.21691176470588236, - "acc_norm_stderr": 0.02503584522771125 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3020408163265306, - "acc_stderr": 0.029393609319879818, - "acc_norm": 0.3020408163265306, - "acc_norm_stderr": 0.029393609319879818 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.459915611814346, - "acc_stderr": 0.03244246810187914, - "acc_norm": 0.459915611814346, - "acc_norm_stderr": 0.03244246810187914 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3076923076923077, - "acc_stderr": 0.011787910251664587, - "acc_norm": 0.3076923076923077, - "acc_norm_stderr": 0.011787910251664587 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.03441190023482465, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.03441190023482465 - }, - 
"harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.40606060606060607, - "acc_stderr": 0.03834816355401181, - "acc_norm": 0.40606060606060607, - "acc_norm_stderr": 0.03834816355401181 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26805385556915545, - "mc1_stderr": 0.015506204722834564, - "mc2": 0.4372987950502058, - "mc2_stderr": 0.014966095064841716 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4424882629107981, - "acc_stderr": 0.01702601866298502, - "acc_norm": 0.5715962441314554, - "acc_norm_stderr": 0.01696315139010863 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v5", - "model_sha": "c1b76b3349242c7c39d5d0f5d7bbff905b890421", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} 
\ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v6/result_2023-10-16 13:04:43.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v6/result_2023-10-16 13:04:43.json deleted file mode 100644 index 66f7505f76a21e821b8eecd28e84bf25b1de6677..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v6/result_2023-10-16 13:04:43.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3890784982935154, - "acc_stderr": 0.014247309976045609, - "acc_norm": 0.4496587030716723, - "acc_norm_stderr": 0.01453714444428475 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4111730730930094, - "acc_stderr": 0.004910409150135485, - "acc_norm": 0.5493925512846046, - "acc_norm_stderr": 0.004965375341643134 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3508771929824561, - "acc_stderr": 0.03660298834049164, - "acc_norm": 0.3508771929824561, - "acc_norm_stderr": 0.03660298834049164 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3537675606641124, - "acc_stderr": 0.01709818470816191, - "acc_norm": 0.3537675606641124, - "acc_norm_stderr": 0.01709818470816191 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996793, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996793 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02850485647051418, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02850485647051418 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.0357160923005348, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.0357160923005348 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.40514469453376206, - "acc_stderr": 0.027882383791325956, - "acc_norm": 0.40514469453376206, - "acc_norm_stderr": 0.027882383791325956 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.031146796482972465, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.031146796482972465 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.37404580152671757, - "acc_stderr": 0.04243869242230524, - "acc_norm": 0.37404580152671757, - "acc_norm_stderr": 0.04243869242230524 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.31313131313131315, - "acc_stderr": 0.03304205087813652, - "acc_norm": 0.31313131313131315, - "acc_norm_stderr": 0.03304205087813652 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438015, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.03780019230438015 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.11764705882352941, - "acc_stderr": 0.032059077331445286, - "acc_norm": 0.11764705882352941, - "acc_norm_stderr": 0.032059077331445286 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.22268907563025211, - "acc_stderr": 0.02702543349888239, - "acc_norm": 0.22268907563025211, - "acc_norm_stderr": 0.02702543349888239 
- }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.022421273612923714, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.022421273612923714 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04668408033024931, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04668408033024931 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868408, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868408 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3387096774193548, - "acc_stderr": 0.026923446059302848, - "acc_norm": 0.3387096774193548, - "acc_norm_stderr": 0.026923446059302848 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.45726495726495725, - "acc_stderr": 0.03263622596380688, - "acc_norm": 0.45726495726495725, - "acc_norm_stderr": 0.03263622596380688 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2830188679245283, - "acc_stderr": 0.027724236492700907, - "acc_norm": 0.2830188679245283, - "acc_norm_stderr": 0.027724236492700907 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.04494290866252089, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.04494290866252089 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.027195934804085626, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.027195934804085626 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - "acc_stderr": 0.03479185572599661, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.03479185572599661 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.40298507462686567, - "acc_stderr": 0.03468343295111126, - "acc_norm": 0.40298507462686567, - "acc_norm_stderr": 0.03468343295111126 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889904, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889904 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2724867724867725, - "acc_stderr": 0.022930973071633356, - "acc_norm": 0.2724867724867725, - "acc_norm_stderr": 0.022930973071633356 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.15, - "acc_stderr": 0.03588702812826371, - "acc_norm": 0.15, - "acc_norm_stderr": 0.03588702812826371 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709390974, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709390974 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.41329479768786126, - "acc_stderr": 0.02651126136940925, - "acc_norm": 0.41329479768786126, - "acc_norm_stderr": 0.02651126136940925 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3558282208588957, - "acc_stderr": 0.03761521380046734, - "acc_norm": 0.3558282208588957, - "acc_norm_stderr": 0.03761521380046734 - }, - "harness|ko_mmlu_prehistory|5": { - 
"acc": 0.3734567901234568, - "acc_stderr": 0.02691500301138016, - "acc_norm": 0.3734567901234568, - "acc_norm_stderr": 0.02691500301138016 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542126, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542126 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3471502590673575, - "acc_stderr": 0.03435696168361355, - "acc_norm": 0.3471502590673575, - "acc_norm_stderr": 0.03435696168361355 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3339449541284404, - "acc_stderr": 0.0202205541967364, - "acc_norm": 0.3339449541284404, - "acc_norm_stderr": 0.0202205541967364 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924315, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924315 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.31699346405228757, - "acc_stderr": 0.02664327847450875, - "acc_norm": 0.31699346405228757, - "acc_norm_stderr": 0.02664327847450875 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.512396694214876, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.512396694214876, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.037827289808654685, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.037827289808654685 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.33169934640522875, - "acc_stderr": 0.019047485239360378, - "acc_norm": 0.33169934640522875, - "acc_norm_stderr": 0.019047485239360378 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.02755336616510137, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.02755336616510137 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.040598672469526885, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.040598672469526885 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.030225226160012407, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.030225226160012407 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961443, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961443 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.21691176470588236, - "acc_stderr": 0.025035845227711254, - "acc_norm": 0.21691176470588236, - "acc_norm_stderr": 0.025035845227711254 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935893, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935893 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.38396624472573837, - 
"acc_stderr": 0.031658678064106674, - "acc_norm": 0.38396624472573837, - "acc_norm_stderr": 0.031658678064106674 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29791395045632335, - "acc_stderr": 0.011680717340400031, - "acc_norm": 0.29791395045632335, - "acc_norm_stderr": 0.011680717340400031 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.31862745098039214, - "acc_stderr": 0.0327028718148208, - "acc_norm": 0.31862745098039214, - "acc_norm_stderr": 0.0327028718148208 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.34545454545454546, - "acc_stderr": 0.03713158067481912, - "acc_norm": 0.34545454545454546, - "acc_norm_stderr": 0.03713158067481912 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.29008567931456547, - "mc1_stderr": 0.01588623687420952, - "mc2": 0.44352094267213416, - "mc2_stderr": 0.014982781844107165 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.323943661971831, - "acc_stderr": 0.016042106970464827, - "acc_norm": 0.43896713615023475, - "acc_norm_stderr": 0.017011608310486013 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v6", - "model_sha": "987860d23201c3c0611a1879baef61d10bfb0b4c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json deleted file mode 100644 index 51f3ca1c01a93f37ae8240fc69e35d79f98fe3b7..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.38139931740614336, - "acc_stderr": 0.014194389086685251, - "acc_norm": 0.45307167235494883, - "acc_norm_stderr": 0.014546892052005628 - }, - "harness|ko_hellaswag|10": { - "acc": 0.41057558255327625, - "acc_stderr": 0.004909328992915067, - "acc_norm": 0.5488946425014938, - "acc_norm_stderr": 0.004965866098318175 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.035469769593931624, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.035469769593931624 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.31417624521072796, - "acc_stderr": 0.016599291735884893, - "acc_norm": 0.31417624521072796, - "acc_norm_stderr": 0.016599291735884893 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996793, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996793 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02850485647051419, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02850485647051419 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3072289156626506, - "acc_stderr": 0.035915667978246635, - "acc_norm": 0.3072289156626506, - "acc_norm_stderr": 0.035915667978246635 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3954983922829582, - "acc_stderr": 0.027770918531427834, - "acc_norm": 0.3954983922829582, - "acc_norm_stderr": 0.027770918531427834 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.29596412556053814, - "acc_stderr": 0.03063659134869982, - "acc_norm": 0.29596412556053814, - "acc_norm_stderr": 0.03063659134869982 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.04260735157644561, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.04260735157644561 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.03318477333845332, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.03318477333845332 - }, - "harness|ko_mmlu_electrical_engineering|5": { - 
"acc": 0.2827586206896552, - "acc_stderr": 0.037528339580033376, - "acc_norm": 0.2827586206896552, - "acc_norm_stderr": 0.037528339580033376 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.09803921568627451, - "acc_stderr": 0.029589188531613252, - "acc_norm": 0.09803921568627451, - "acc_norm_stderr": 0.029589188531613252 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29831932773109243, - "acc_stderr": 0.02971914287634286, - "acc_norm": 0.29831932773109243, - "acc_norm_stderr": 0.02971914287634286 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2743589743589744, - "acc_stderr": 0.022622765767493214, - "acc_norm": 0.2743589743589744, - "acc_norm_stderr": 0.022622765767493214 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.045879047413018105, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.045879047413018105 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.03178529710642748, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.03178529710642748 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34838709677419355, - "acc_stderr": 0.027104826328100944, - "acc_norm": 0.34838709677419355, - "acc_norm_stderr": 0.027104826328100944 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.37606837606837606, - "acc_stderr": 0.031733936329694824, - "acc_norm": 0.37606837606837606, - "acc_norm_stderr": 0.031733936329694824 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.027495663683724046, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.027495663683724046 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.026593939101844065, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.026593939101844065 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.373134328358209, - "acc_stderr": 0.034198326081760065, - "acc_norm": 0.373134328358209, - "acc_norm_stderr": 0.034198326081760065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267438, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267438 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194978, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194978 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.04016660030451233, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.04016660030451233 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.12, - "acc_stderr": 0.03265986323710906, - "acc_norm": 0.12, - "acc_norm_stderr": 0.03265986323710906 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 
0.37, - "acc_stderr": 0.048523658709390974, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709390974 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.02607431485165708, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.02607431485165708 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.0360251131880677, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.0360251131880677 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.35802469135802467, - "acc_stderr": 0.0266756119260371, - "acc_norm": 0.35802469135802467, - "acc_norm_stderr": 0.0266756119260371 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.34196891191709844, - "acc_stderr": 0.03423465100104281, - "acc_norm": 0.34196891191709844, - "acc_norm_stderr": 0.03423465100104281 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.0433913832257986, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.0433913832257986 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3174311926605505, - "acc_stderr": 0.019957152198460504, - "acc_norm": 0.3174311926605505, - "acc_norm_stderr": 0.019957152198460504 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04006168083848876, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04006168083848876 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.02656892101545716, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.02656892101545716 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4380165289256198, - "acc_stderr": 0.045291468044357915, - "acc_norm": 0.4380165289256198, - "acc_norm_stderr": 0.045291468044357915 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351586, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351586 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.01877168389352817, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.01877168389352817 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.31560283687943264, - "acc_stderr": 0.027724989449509317, - "acc_norm": 0.31560283687943264, - "acc_norm_stderr": 0.027724989449509317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.03952301967702511, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.03952301967702511 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.028765111718046944, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.028765111718046944 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476786, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476786 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 
0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.02518778666022727, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.02518778666022727 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827423, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827423 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.43037974683544306, - "acc_stderr": 0.032230171959375976, - "acc_norm": 0.43037974683544306, - "acc_norm_stderr": 0.032230171959375976 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29791395045632335, - "acc_stderr": 0.011680717340400057, - "acc_norm": 0.29791395045632335, - "acc_norm_stderr": 0.011680717340400057 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.033321399446680854, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.033321399446680854 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3151515151515151, - "acc_stderr": 0.0362773057502241, - "acc_norm": 0.3151515151515151, - "acc_norm_stderr": 0.0362773057502241 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27906976744186046, - "mc1_stderr": 0.015702107090627884, - "mc2": 0.43717065836326097, - "mc2_stderr": 0.014982579691917674 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3568075117370892, - "acc_stderr": 0.016421873267918906, - "acc_norm": 0.4753521126760563, - "acc_norm_stderr": 0.017118941126722683 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - 
"harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7", - "model_sha": "c0836cce043af8ee88da9cb52b2032d3fa8c5ddd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:34:31.json b/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:34:31.json deleted file mode 100644 index 1c7ef03f4da1a263d1b027ef0ce07ccf82659daf..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:34:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.371160409556314, - "acc_stderr": 0.014117971901142818, - "acc_norm": 0.42662116040955633, - "acc_norm_stderr": 0.014453185592920293 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4075881298546106, - "acc_stderr": 0.004903815885983279, - "acc_norm": 0.5435172276438957, - "acc_norm_stderr": 0.004970846697552308 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.03833185275213026, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.03833185275213026 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.44660194174757284, - "acc_stderr": 0.04922424153458933, - "acc_norm": 0.44660194174757284, - "acc_norm_stderr": 0.04922424153458933 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5019157088122606, - "acc_stderr": 0.017879832259026673, - "acc_norm": 0.5019157088122606, - "acc_norm_stderr": 0.017879832259026673 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3872340425531915, - "acc_stderr": 0.03184389265339526, - "acc_norm": 0.3872340425531915, - "acc_norm_stderr": 0.03184389265339526 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.0382840111507902, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.0382840111507902 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4887459807073955, - "acc_stderr": 0.028390897396863533, - "acc_norm": 0.4887459807073955, - "acc_norm_stderr": 0.028390897396863533 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.49327354260089684, - "acc_stderr": 0.033554765962343545, - "acc_norm": 0.49327354260089684, - "acc_norm_stderr": 0.033554765962343545 - }, - "harness|ko_mmlu_human_sexuality|5": { - 
"acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.48484848484848486, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.03941707632064889, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.03941707632064889 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.40336134453781514, - "acc_stderr": 0.031866081214088314, - "acc_norm": 0.40336134453781514, - "acc_norm_stderr": 0.031866081214088314 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.02491524398598784, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.02491524398598784 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907061, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907061 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4161290322580645, - "acc_stderr": 0.02804098138076155, - "acc_norm": 0.4161290322580645, - "acc_norm_stderr": 0.02804098138076155 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6837606837606838, - "acc_stderr": 0.030463656747340254, - "acc_norm": 0.6837606837606838, - "acc_norm_stderr": 0.030463656747340254 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.39245283018867927, - "acc_stderr": 0.03005258057955784, - "acc_norm": 0.39245283018867927, - "acc_norm_stderr": 0.03005258057955784 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.028037929969114986, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.028037929969114986 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5422885572139303, - "acc_stderr": 0.03522865864099598, - "acc_norm": 0.5422885572139303, - "acc_norm_stderr": 0.03522865864099598 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.03656343653353159, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.03656343653353159 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 
0.2962962962962963, - "acc_stderr": 0.023517294335963286, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.023517294335963286 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.026756255129663762, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.026756255129663762 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.42901234567901236, - "acc_stderr": 0.027538925613470867, - "acc_norm": 0.42901234567901236, - "acc_norm_stderr": 0.027538925613470867 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.03608003225569653, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.03608003225569653 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5064220183486239, - "acc_stderr": 0.02143555482001308, - "acc_norm": 0.5064220183486239, - "acc_norm_stderr": 0.02143555482001308 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871137, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871137 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.434640522875817, - "acc_stderr": 0.02838425670488304, - "acc_norm": 0.434640522875817, - "acc_norm_stderr": 0.02838425670488304 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3881578947368421, - "acc_stderr": 0.03965842097512744, - "acc_norm": 0.3881578947368421, - "acc_norm_stderr": 0.03965842097512744 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.0193733324207245, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.0193733324207245 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.027889139300534792, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.027889139300534792 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.03203614084670058, - "acc_norm": 
0.3287037037037037, - "acc_norm_stderr": 0.03203614084670058 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3014705882352941, - "acc_stderr": 0.027875982114273168, - "acc_norm": 0.3014705882352941, - "acc_norm_stderr": 0.027875982114273168 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37551020408163266, - "acc_stderr": 0.031001209039894843, - "acc_norm": 0.37551020408163266, - "acc_norm_stderr": 0.031001209039894843 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31421121251629724, - "acc_stderr": 0.011855911587048231, - "acc_norm": 0.31421121251629724, - "acc_norm_stderr": 0.011855911587048231 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4852941176470588, - "acc_stderr": 0.03507793834791324, - "acc_norm": 0.4852941176470588, - "acc_norm_stderr": 0.03507793834791324 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0390369864774844, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0390369864774844 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2484700122399021, - "mc1_stderr": 0.0151274270965207, - "mc2": 0.40213800667232835, - "mc2_stderr": 0.014730084928202228 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3192488262910798, - "acc_stderr": 0.015980636535168225, - "acc_norm": 0.44366197183098594, - "acc_norm_stderr": 0.017030629301613084 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B", - "model_sha": "fb333611b94ee15e4bb43e2535da14f147f760dc", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:52:57.json b/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:52:57.json deleted file mode 100644 index db404daf3f64ee1567bb7754e29ab7726e9a2ee6..0000000000000000000000000000000000000000 --- a/krevas/LDCC-Instruct-Llama-2-ko-13B/result_2023-10-09 05:52:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.371160409556314, - "acc_stderr": 0.014117971901142818, - "acc_norm": 0.42662116040955633, - "acc_norm_stderr": 0.014453185592920293 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4075881298546106, - "acc_stderr": 0.004903815885983279, - "acc_norm": 0.5435172276438957, - "acc_norm_stderr": 0.004970846697552308 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5146198830409356, - "acc_stderr": 0.03833185275213026, - "acc_norm": 0.5146198830409356, - "acc_norm_stderr": 0.03833185275213026 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.44660194174757284, - "acc_stderr": 0.04922424153458933, - "acc_norm": 0.44660194174757284, - "acc_norm_stderr": 0.04922424153458933 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.017879948914431662, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.017879948914431662 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3829787234042553, - "acc_stderr": 0.03177821250236922, - "acc_norm": 0.3829787234042553, - "acc_norm_stderr": 0.03177821250236922 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.0382840111507902, - "acc_norm": 
0.40963855421686746, - "acc_norm_stderr": 0.0382840111507902 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4887459807073955, - "acc_stderr": 0.028390897396863533, - "acc_norm": 0.4887459807073955, - "acc_norm_stderr": 0.028390897396863533 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4977578475336323, - "acc_stderr": 0.033557465352232634, - "acc_norm": 0.4977578475336323, - "acc_norm_stderr": 0.033557465352232634 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4797979797979798, - "acc_stderr": 0.03559443565563919, - "acc_norm": 0.4797979797979798, - "acc_norm_stderr": 0.03559443565563919 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.33793103448275863, - "acc_stderr": 0.03941707632064889, - "acc_norm": 0.33793103448275863, - "acc_norm_stderr": 0.03941707632064889 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.40336134453781514, - "acc_stderr": 0.031866081214088314, - "acc_norm": 0.40336134453781514, - "acc_norm_stderr": 0.031866081214088314 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4076923076923077, - "acc_stderr": 0.02491524398598784, - "acc_norm": 0.4076923076923077, - "acc_norm_stderr": 0.02491524398598784 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907061, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907061 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4161290322580645, - "acc_stderr": 0.02804098138076155, - "acc_norm": 0.4161290322580645, - "acc_norm_stderr": 0.02804098138076155 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6837606837606838, - "acc_stderr": 0.030463656747340254, - "acc_norm": 0.6837606837606838, - "acc_norm_stderr": 0.030463656747340254 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3886792452830189, - "acc_stderr": 0.030000485448675986, - "acc_norm": 0.3886792452830189, - "acc_norm_stderr": 0.030000485448675986 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4909090909090909, - "acc_stderr": 0.04788339768702861, - "acc_norm": 0.4909090909090909, - "acc_norm_stderr": 0.04788339768702861 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.028037929969114986, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.028037929969114986 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 
0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5422885572139303, - "acc_stderr": 0.03522865864099598, - "acc_norm": 0.5422885572139303, - "acc_norm_stderr": 0.03522865864099598 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.03656343653353159, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.03656343653353159 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.02345603738398202, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.02345603738398202 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.026756255129663762, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.026756255129663762 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.43209876543209874, - "acc_stderr": 0.02756301097160667, - "acc_norm": 0.43209876543209874, - "acc_norm_stderr": 0.02756301097160667 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.03608003225569653, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.03608003225569653 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5082568807339449, - "acc_stderr": 0.021434399918214338, - "acc_norm": 0.5082568807339449, - "acc_norm_stderr": 0.021434399918214338 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871137, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871137 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.434640522875817, - "acc_stderr": 0.02838425670488304, - "acc_norm": 0.434640522875817, - "acc_norm_stderr": 0.02838425670488304 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3881578947368421, - "acc_stderr": 0.03965842097512744, - "acc_norm": 0.3881578947368421, - "acc_norm_stderr": 0.03965842097512744 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.0193733324207245, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.0193733324207245 - }, - 
"harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320186, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320186 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.03203614084670058, - "acc_norm": 0.3287037037037037, - "acc_norm_stderr": 0.03203614084670058 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3014705882352941, - "acc_stderr": 0.027875982114273168, - "acc_norm": 0.3014705882352941, - "acc_norm_stderr": 0.027875982114273168 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.37551020408163266, - "acc_stderr": 0.031001209039894843, - "acc_norm": 0.37551020408163266, - "acc_norm_stderr": 0.031001209039894843 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31421121251629724, - "acc_stderr": 0.011855911587048231, - "acc_norm": 0.31421121251629724, - "acc_norm_stderr": 0.011855911587048231 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4852941176470588, - "acc_stderr": 0.03507793834791324, - "acc_norm": 0.4852941176470588, - "acc_norm_stderr": 0.03507793834791324 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0390369864774844, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0390369864774844 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2484700122399021, - "mc1_stderr": 0.0151274270965207, - "mc2": 0.40213800667232835, - "mc2_stderr": 0.014730084928202228 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3192488262910798, - "acc_stderr": 0.015980636535168225, - "acc_norm": 0.44366197183098594, - "acc_norm_stderr": 0.017030629301613084 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - 
"harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B", - "model_sha": "fb333611b94ee15e4bb43e2535da14f147f760dc", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json b/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json deleted file mode 100644 index 77c44a82c761938678c56712dc7508592e429fe6..0000000000000000000000000000000000000000 --- a/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3174061433447099, - "acc_stderr": 0.01360223908803817, - "acc_norm": 0.3677474402730375, - "acc_norm_stderr": 0.014090995618168484 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3822943636725752, - "acc_stderr": 0.004849547819134473, - "acc_norm": 0.4938259310894244, - "acc_norm_stderr": 0.00498940098472222 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3486590038314176, - "acc_stderr": 0.017041243143490946, - "acc_norm": 0.3486590038314176, - "acc_norm_stderr": 0.017041243143490946 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - 
"harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2723404255319149, - "acc_stderr": 0.029101290698386705, - "acc_norm": 0.2723404255319149, - "acc_norm_stderr": 0.029101290698386705 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3253012048192771, - "acc_stderr": 0.03647168523683227, - "acc_norm": 0.3253012048192771, - "acc_norm_stderr": 0.03647168523683227 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.34726688102893893, - "acc_stderr": 0.027040745502307336, - "acc_norm": 0.34726688102893893, - "acc_norm_stderr": 0.027040745502307336 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3183856502242152, - "acc_stderr": 0.03126580522513713, - "acc_norm": 0.3183856502242152, - "acc_norm_stderr": 0.03126580522513713 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.35877862595419846, - "acc_stderr": 0.04206739313864908, - "acc_norm": 0.35877862595419846, - "acc_norm_stderr": 0.04206739313864908 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3787878787878788, - "acc_stderr": 0.03456088731993747, - "acc_norm": 0.3787878787878788, - "acc_norm_stderr": 0.03456088731993747 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138622, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138622 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.27310924369747897, - "acc_stderr": 0.028942004040998167, - "acc_norm": 0.27310924369747897, - "acc_norm_stderr": 0.028942004040998167 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.25384615384615383, - "acc_stderr": 0.022066054378726257, - "acc_norm": 0.25384615384615383, - "acc_norm_stderr": 0.022066054378726257 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.044143436668549335, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.044143436668549335 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.30049261083743845, - "acc_stderr": 0.03225799476233484, - "acc_norm": 0.30049261083743845, - "acc_norm_stderr": 0.03225799476233484 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3032258064516129, - "acc_stderr": 0.026148685930671746, - "acc_norm": 0.3032258064516129, - "acc_norm_stderr": 0.026148685930671746 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.32051282051282054, - "acc_stderr": 0.030572811310299607, - "acc_norm": 0.32051282051282054, - "acc_norm_stderr": 0.030572811310299607 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3169811320754717, - "acc_stderr": 0.028637235639800935, - "acc_norm": 0.3169811320754717, - "acc_norm_stderr": 0.028637235639800935 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 
0.3090909090909091, - "acc_stderr": 0.044262946482000985, - "acc_norm": 0.3090909090909091, - "acc_norm_stderr": 0.044262946482000985 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.32338308457711445, - "acc_stderr": 0.03307615947979033, - "acc_norm": 0.32338308457711445, - "acc_norm_stderr": 0.03307615947979033 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918417, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918417 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.18055555555555555, - "acc_stderr": 0.032166008088022675, - "acc_norm": 0.18055555555555555, - "acc_norm_stderr": 0.032166008088022675 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3265895953757225, - "acc_stderr": 0.02524826477424284, - "acc_norm": 0.3265895953757225, - "acc_norm_stderr": 0.02524826477424284 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32098765432098764, - "acc_stderr": 0.025976566010862744, - "acc_norm": 0.32098765432098764, - "acc_norm_stderr": 0.025976566010862744 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.26424870466321243, - "acc_stderr": 0.03182155050916647, - "acc_norm": 0.26424870466321243, - "acc_norm_stderr": 0.03182155050916647 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512322, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512322 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3724770642201835, - "acc_stderr": 0.020728368457638494, - "acc_norm": 0.3724770642201835, - "acc_norm_stderr": 0.020728368457638494 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.03268454013011744, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.03268454013011744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.027420477662629245, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.027420477662629245 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847415, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847415 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4214876033057851, - "acc_stderr": 
0.045077322787750944, - "acc_norm": 0.4214876033057851, - "acc_norm_stderr": 0.045077322787750944 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.03925523381052932, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.03925523381052932 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.018120224251484577, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.018120224251484577 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.28368794326241137, - "acc_stderr": 0.02689170942834396, - "acc_norm": 0.28368794326241137, - "acc_norm_stderr": 0.02689170942834396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.043270409325787296, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.043270409325787296 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.03191923445686185, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.03191923445686185 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961441, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961441 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35661764705882354, - "acc_stderr": 0.02909720956841196, - "acc_norm": 0.35661764705882354, - "acc_norm_stderr": 0.02909720956841196 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2816326530612245, - "acc_stderr": 0.02879518557429129, - "acc_norm": 0.2816326530612245, - "acc_norm_stderr": 0.02879518557429129 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35864978902953587, - "acc_stderr": 0.03121956944530184, - "acc_norm": 0.35864978902953587, - "acc_norm_stderr": 0.03121956944530184 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2842242503259452, - "acc_stderr": 0.011519880596516076, - "acc_norm": 0.2842242503259452, - "acc_norm_stderr": 0.011519880596516076 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.03588624800091708, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.03588624800091708 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.01510240479735965, - "mc2": 0.3775578914340665, - "mc2_stderr": 0.014769349915486594 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.39436619718309857, - "acc_stderr": 0.016752904573125338, - "acc_norm": 0.539906103286385, - "acc_norm_stderr": 0.017085101945400163 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - 
"harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/CoT-llama-2k-7b", - "model_sha": "34e288e1ae07ae75ac12ecaa9161ac1dc1b25552", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json b/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json deleted file mode 100644 index 354ea31cc189484e2f32316e4c85f95ae7db7c80..0000000000000000000000000000000000000000 --- a/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.386518771331058, - "acc_stderr": 0.014230084761910471, - "acc_norm": 0.44197952218430037, - "acc_norm_stderr": 0.014512682523128345 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40818562039434375, - "acc_stderr": 0.004904933500255867, - "acc_norm": 0.5431189006174069, - "acc_norm_stderr": 0.0049711923872024465 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5321637426900585, - "acc_stderr": 0.03826882417660368, - "acc_norm": 0.5321637426900585, - "acc_norm_stderr": 0.03826882417660368 - }, - "harness|ko_mmlu_management|5": { - 
"acc": 0.5048543689320388, - "acc_stderr": 0.04950504382128921, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.04950504382128921 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5261813537675607, - "acc_stderr": 0.017855434554042, - "acc_norm": 0.5261813537675607, - "acc_norm_stderr": 0.017855434554042 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.43703703703703706, - "acc_stderr": 0.04284958639753399, - "acc_norm": 0.43703703703703706, - "acc_norm_stderr": 0.04284958639753399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.03124532520276193, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.03124532520276193 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4533762057877814, - "acc_stderr": 0.02827435985489424, - "acc_norm": 0.4533762057877814, - "acc_norm_stderr": 0.02827435985489424 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4170403587443946, - "acc_stderr": 0.03309266936071721, - "acc_norm": 0.4170403587443946, - "acc_norm_stderr": 0.03309266936071721 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4797979797979798, - "acc_stderr": 0.03559443565563919, - "acc_norm": 0.4797979797979798, - "acc_norm_stderr": 0.03559443565563919 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.04533838195929776, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.04533838195929776 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42016806722689076, - "acc_stderr": 0.032061837832361516, - "acc_norm": 0.42016806722689076, - "acc_norm_stderr": 0.032061837832361516 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4205128205128205, - "acc_stderr": 0.025028610276710852, - "acc_norm": 0.4205128205128205, - "acc_norm_stderr": 0.025028610276710852 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.52, - "acc_stderr": 0.05021167315686779, - "acc_norm": 0.52, - "acc_norm_stderr": 0.05021167315686779 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5185185185185185, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.5185185185185185, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - "acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.028272410186214906, - 
"acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.028272410186214906 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6068376068376068, - "acc_stderr": 0.03199957924651048, - "acc_norm": 0.6068376068376068, - "acc_norm_stderr": 0.03199957924651048 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4377358490566038, - "acc_stderr": 0.03053333843046751, - "acc_norm": 0.4377358490566038, - "acc_norm_stderr": 0.03053333843046751 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5636363636363636, - "acc_stderr": 0.04750185058907297, - "acc_norm": 0.5636363636363636, - "acc_norm_stderr": 0.04750185058907297 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945273, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945273 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.03714325906302065, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302065 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.023517294335963286, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.023517294335963286 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.62, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.62, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.476878612716763, - "acc_stderr": 0.026890297881303128, - "acc_norm": 0.476878612716763, - "acc_norm_stderr": 0.026890297881303128 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.027716661650194048, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.027716661650194048 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384488, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384488 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5321100917431193, - "acc_stderr": 0.021393071222680804, - "acc_norm": 0.5321100917431193, - "acc_norm_stderr": 0.021393071222680804 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871136, - "acc_norm": 
0.3253968253968254, - "acc_norm_stderr": 0.04190596438871136 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.028431095444176643, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.028431095444176643 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.628099173553719, - "acc_stderr": 0.044120158066245044, - "acc_norm": 0.628099173553719, - "acc_norm_stderr": 0.044120158066245044 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490437, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490437 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.34967320261437906, - "acc_stderr": 0.019291961895066382, - "acc_norm": 0.34967320261437906, - "acc_norm_stderr": 0.019291961895066382 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.34397163120567376, - "acc_stderr": 0.028338017428611317, - "acc_norm": 0.34397163120567376, - "acc_norm_stderr": 0.028338017428611317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.23214285714285715, - "acc_stderr": 0.04007341809755806, - "acc_norm": 0.23214285714285715, - "acc_norm_stderr": 0.04007341809755806 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2569832402234637, - "acc_stderr": 0.014614465821966346, - "acc_norm": 0.2569832402234637, - "acc_norm_stderr": 0.014614465821966346 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.030008562845003476, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.030008562845003476 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46530612244897956, - "acc_stderr": 0.03193207024425314, - "acc_norm": 0.46530612244897956, - "acc_norm_stderr": 0.03193207024425314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.37353324641460234, - "acc_stderr": 0.012354994823515274, - "acc_norm": 0.37353324641460234, - "acc_norm_stderr": 0.012354994823515274 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.46078431372549017, - "acc_stderr": 0.03498501649369527, - "acc_norm": 0.46078431372549017, - "acc_norm_stderr": 0.03498501649369527 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0390369864774844, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0390369864774844 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006514, - "mc2": 0.44412739310048044, - "mc2_stderr": 0.015229602209106612 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4507042253521127, - "acc_stderr": 0.017056273992093966, - "acc_norm": 
0.5410798122065728, - "acc_norm_stderr": 0.01708183279935279 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/KO-Platypus2-13B", - "model_sha": "dffd8a6c44d2ece30aadd4ba260ae1ea7dbb1104", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json b/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json deleted file mode 100644 index 664ffd489eaa772928cfee7d61e98b47214a2541..0000000000000000000000000000000000000000 --- a/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3302047781569966, - "acc_stderr": 0.013743085603760427, - 
"acc_norm": 0.39078498293515357, - "acc_norm_stderr": 0.014258563880513778 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3868751244771958, - "acc_stderr": 0.004860393011974673, - "acc_norm": 0.5085640310695081, - "acc_norm_stderr": 0.004989049430391292 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.045821241601615506, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.045821241601615506 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.38697318007662834, - "acc_stderr": 0.017417138059440146, - "acc_norm": 0.38697318007662834, - "acc_norm_stderr": 0.017417138059440146 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354544, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354544 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628817, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628817 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.037400593820293204, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.037400593820293204 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.37942122186495175, - "acc_stderr": 0.027559949802347813, - "acc_norm": 0.37942122186495175, - "acc_norm_stderr": 0.027559949802347813 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755291, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755291 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.04384140024078016, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.04384140024078016 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852731, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852731 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419035, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419035 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171453, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171453 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3865546218487395, - "acc_stderr": 0.0316314580755238, - "acc_norm": 0.3865546218487395, - "acc_norm_stderr": 0.0316314580755238 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2846153846153846, - "acc_stderr": 0.022878322799706294, - "acc_norm": 0.2846153846153846, - "acc_norm_stderr": 0.022878322799706294 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2512315270935961, - "acc_stderr": 0.030516530732694436, - "acc_norm": 0.2512315270935961, - "acc_norm_stderr": 0.030516530732694436 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.33548387096774196, - "acc_stderr": 0.026860206444724342, - "acc_norm": 0.33548387096774196, - "acc_norm_stderr": 0.026860206444724342 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.47863247863247865, - "acc_stderr": 0.032726164476349545, - "acc_norm": 0.47863247863247865, - "acc_norm_stderr": 0.032726164476349545 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33962264150943394, - "acc_stderr": 0.029146904747798356, - "acc_norm": 0.33962264150943394, - "acc_norm_stderr": 0.029146904747798356 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.046075820907199756, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.046075820907199756 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.026202766534652148, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.026202766534652148 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4626865671641791, - "acc_stderr": 0.03525675167467974, - "acc_norm": 0.4626865671641791, - "acc_norm_stderr": 0.03525675167467974 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.31213872832369943, - "acc_stderr": 0.035331333893236574, - "acc_norm": 0.31213872832369943, - "acc_norm_stderr": 0.035331333893236574 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.02271746789770861, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.02271746789770861 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.40173410404624277, - "acc_stderr": 0.026394104177643634, - "acc_norm": 0.40173410404624277, - "acc_norm_stderr": 0.026394104177643634 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.31901840490797545, - "acc_stderr": 0.03661997551073836, - "acc_norm": 0.31901840490797545, - "acc_norm_stderr": 0.03661997551073836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3765432098765432, - "acc_stderr": 0.026959344518747794, - "acc_norm": 0.3765432098765432, - "acc_norm_stderr": 0.026959344518747794 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41450777202072536, - "acc_stderr": 0.03555300319557672, - "acc_norm": 0.41450777202072536, - "acc_norm_stderr": 0.03555300319557672 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4036697247706422, - "acc_stderr": 0.021035704856574963, - "acc_norm": 0.4036697247706422, - "acc_norm_stderr": 0.021035704856574963 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924315, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924315 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.028036092273891776, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.028036092273891776 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.512396694214876, - "acc_stderr": 0.04562951548180765, - "acc_norm": 0.512396694214876, - "acc_norm_stderr": 0.04562951548180765 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.32516339869281047, - "acc_stderr": 0.018950886770806308, - "acc_norm": 0.32516339869281047, - "acc_norm_stderr": 0.018950886770806308 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.26785714285714285, - "acc_stderr": 0.04203277291467762, - "acc_norm": 0.26785714285714285, - "acc_norm_stderr": 0.04203277291467762 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03179876342176851, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03179876342176851 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225608, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.029935342707877746, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.029935342707877746 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39183673469387753, - "acc_stderr": 0.031251275910891656, - "acc_norm": 0.39183673469387753, - "acc_norm_stderr": 0.031251275910891656 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32659713168187743, - "acc_stderr": 0.01197767670471599, - "acc_norm": 0.32659713168187743, - "acc_norm_stderr": 0.01197767670471599 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.03384132045674119, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 
0.03384132045674119 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.03756335775187897, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.03756335775187897 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752334, - "mc2": 0.3794460140456843, - "mc2_stderr": 0.014936611984494383 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5598591549295775, - "acc_stderr": 0.017016508248243346, - "acc_norm": 0.6455399061032864, - "acc_norm_stderr": 0.016397605788502096 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/KO-Platypus2-7B-ex", - "model_sha": "02e294c2650d604a5fbd14bf1254855c6eea240c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - 
"max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json b/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json deleted file mode 100644 index 1c59c4209629c3f48efe10d7425098716bf88707..0000000000000000000000000000000000000000 --- a/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.36860068259385664, - "acc_stderr": 0.0140978106780422, - "acc_norm": 0.4206484641638225, - "acc_norm_stderr": 0.014426211252508394 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4047002589125672, - "acc_stderr": 0.004898308167211838, - "acc_norm": 0.5395339573790081, - "acc_norm_stderr": 0.004974159561342694 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458934, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458934 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5095785440613027, - "acc_stderr": 0.01787668227534086, - "acc_norm": 0.5095785440613027, - "acc_norm_stderr": 0.01787668227534086 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.42962962962962964, - "acc_stderr": 0.04276349494376599, - "acc_norm": 0.42962962962962964, - "acc_norm_stderr": 0.04276349494376599 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.030783736757745657, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.030783736757745657 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.037400593820293204, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.037400593820293204 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4983922829581994, - "acc_stderr": 0.02839794490780661, - "acc_norm": 0.4983922829581994, - "acc_norm_stderr": 0.02839794490780661 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4977578475336323, - "acc_stderr": 0.03355746535223263, - "acc_norm": 0.4977578475336323, - "acc_norm_stderr": 0.03355746535223263 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.03547601494006938, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.03547601494006938 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364396, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364396 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.032339434681820885, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.032339434681820885 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4230769230769231, - "acc_stderr": 0.025049197876042335, - "acc_norm": 0.4230769230769231, - "acc_norm_stderr": 0.025049197876042335 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760626, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760626 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998576, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998576 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43548387096774194, - "acc_stderr": 0.028206225591502744, - "acc_norm": 0.43548387096774194, - "acc_norm_stderr": 0.028206225591502744 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6452991452991453, - "acc_stderr": 0.03134250486245402, - "acc_norm": 0.6452991452991453, - "acc_norm_stderr": 0.03134250486245402 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4226415094339623, - "acc_stderr": 0.03040233144576954, - "acc_norm": 0.4226415094339623, - "acc_norm_stderr": 0.03040233144576954 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5, - "acc_stderr": 0.04789131426105757, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04789131426105757 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.028317533496066475, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.028317533496066475 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943343, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943343 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5522388059701493, - "acc_stderr": 0.035161847729521675, - "acc_norm": 0.5522388059701493, - "acc_norm_stderr": 0.035161847729521675 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3930635838150289, - "acc_stderr": 0.0372424959581773, - "acc_norm": 0.3930635838150289, - "acc_norm_stderr": 0.0372424959581773 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36772486772486773, - "acc_stderr": 0.024833839825562427, - "acc_norm": 0.36772486772486773, - "acc_norm_stderr": 0.024833839825562427 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3541666666666667, - "acc_stderr": 0.039994111357535424, - "acc_norm": 0.3541666666666667, - "acc_norm_stderr": 0.039994111357535424 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4624277456647399, - "acc_stderr": 0.026842985519615375, - "acc_norm": 0.4624277456647399, - "acc_norm_stderr": 0.026842985519615375 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4601226993865031, - "acc_stderr": 0.0391585729143697, - "acc_norm": 0.4601226993865031, - "acc_norm_stderr": 0.0391585729143697 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4783950617283951, - "acc_stderr": 
0.027794760105008746, - "acc_norm": 0.4783950617283951, - "acc_norm_stderr": 0.027794760105008746 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49222797927461137, - "acc_stderr": 0.03608003225569654, - "acc_norm": 0.49222797927461137, - "acc_norm_stderr": 0.03608003225569654 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0383515395439942, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0383515395439942 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5376146788990825, - "acc_stderr": 0.021376575274397576, - "acc_norm": 0.5376146788990825, - "acc_norm_stderr": 0.021376575274397576 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.42063492063492064, - "acc_stderr": 0.04415438226743744, - "acc_norm": 0.42063492063492064, - "acc_norm_stderr": 0.04415438226743744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.42483660130718953, - "acc_stderr": 0.028304576673141107, - "acc_norm": 0.42483660130718953, - "acc_norm_stderr": 0.028304576673141107 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5950413223140496, - "acc_stderr": 0.04481137755942469, - "acc_norm": 0.5950413223140496, - "acc_norm_stderr": 0.04481137755942469 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490436, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490436 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.36437908496732024, - "acc_stderr": 0.019469518221573702, - "acc_norm": 0.36437908496732024, - "acc_norm_stderr": 0.019469518221573702 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.028267657482650147, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.028267657482650147 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.042466243366976256, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.042466243366976256 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.375, - "acc_stderr": 0.033016908987210894, - "acc_norm": 0.375, - "acc_norm_stderr": 0.033016908987210894 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3382352941176471, - "acc_stderr": 0.028739328513983583, - "acc_norm": 0.3382352941176471, - "acc_norm_stderr": 0.028739328513983583 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.47346938775510206, - "acc_stderr": 0.03196412734523272, - "acc_norm": 0.47346938775510206, - "acc_norm_stderr": 0.03196412734523272 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.569620253164557, - "acc_stderr": 0.03223017195937599, - 
"acc_norm": 0.569620253164557, - "acc_norm_stderr": 0.03223017195937599 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.35658409387222945, - "acc_stderr": 0.012233642989273891, - "acc_norm": 0.35658409387222945, - "acc_norm_stderr": 0.012233642989273891 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.03492406104163614, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163614 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.49696969696969695, - "acc_stderr": 0.03904272341431857, - "acc_norm": 0.49696969696969695, - "acc_norm_stderr": 0.03904272341431857 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.015659605755326905, - "mc2": 0.43550201857978377, - "mc2_stderr": 0.015311053526638174 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5903755868544601, - "acc_stderr": 0.016857467505356098, - "acc_norm": 0.687793427230047, - "acc_norm_stderr": 0.01588492803037487 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - 
"harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/KoR-Orca-Platypus-13B", - "model_sha": "196ac2685100eeb813e05e57d843df14b81b0709", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json b/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json deleted file mode 100644 index ee0ac9a656e4acfe7a4a0b3d77d0d262f22cbc3f..0000000000000000000000000000000000000000 --- a/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.371160409556314, - "acc_stderr": 0.014117971901142818, - "acc_norm": 0.43686006825938567, - "acc_norm_stderr": 0.014494421584256515 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40221071499701255, - "acc_stderr": 0.004893418929918276, - "acc_norm": 0.5304720175263892, - "acc_norm_stderr": 0.004980506329407588 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.52046783625731, - "acc_stderr": 0.038316105328219316, - "acc_norm": 0.52046783625731, - "acc_norm_stderr": 0.038316105328219316 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5172413793103449, - "acc_stderr": 0.017869330154003698, - "acc_norm": 0.5172413793103449, - "acc_norm_stderr": 0.017869330154003698 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.45925925925925926, - "acc_stderr": 0.04304979692464243, - "acc_norm": 0.45925925925925926, - "acc_norm_stderr": 0.04304979692464243 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.03124532520276193, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.03124532520276193 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.45016077170418006, - "acc_stderr": 0.02825666072336018, - "acc_norm": 0.45016077170418006, - "acc_norm_stderr": 0.02825666072336018 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4080717488789238, - "acc_stderr": 0.03298574607842821, - "acc_norm": 0.4080717488789238, - "acc_norm_stderr": 0.03298574607842821 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.043564472026650695, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.043564472026650695 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5202020202020202, - "acc_stderr": 0.03559443565563918, - "acc_norm": 0.5202020202020202, - "acc_norm_stderr": 0.03559443565563918 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - 
"acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.04389869956808777, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.04389869956808777 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.03196876989195779, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.03196876989195779 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4025641025641026, - "acc_stderr": 0.02486499515976777, - "acc_norm": 0.4025641025641026, - "acc_norm_stderr": 0.02486499515976777 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5462962962962963, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.5462962962962963, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051448, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051448 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4483870967741935, - "acc_stderr": 0.028292056830112735, - "acc_norm": 0.4483870967741935, - "acc_norm_stderr": 0.028292056830112735 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6068376068376068, - "acc_stderr": 0.03199957924651048, - "acc_norm": 0.6068376068376068, - "acc_norm_stderr": 0.03199957924651048 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4377358490566038, - "acc_stderr": 0.03053333843046751, - "acc_norm": 0.4377358490566038, - "acc_norm_stderr": 0.03053333843046751 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5545454545454546, - "acc_stderr": 0.047605488214603246, - "acc_norm": 0.5545454545454546, - "acc_norm_stderr": 0.047605488214603246 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5920398009950248, - "acc_stderr": 0.03475116365194092, - "acc_norm": 0.5920398009950248, - "acc_norm_stderr": 0.03475116365194092 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.0374246119388725, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.0374246119388725 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2804232804232804, - "acc_stderr": 0.02313528797432564, - "acc_norm": 0.2804232804232804, - "acc_norm_stderr": 0.02313528797432564 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.375, - "acc_stderr": 0.04048439222695598, - "acc_norm": 0.375, - "acc_norm_stderr": 0.04048439222695598 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.65, - "acc_stderr": 0.04793724854411018, - "acc_norm": 0.65, - "acc_norm_stderr": 0.04793724854411018 - }, - "harness|ko_mmlu_moral_disputes|5": { 
- "acc": 0.47398843930635837, - "acc_stderr": 0.02688264343402289, - "acc_norm": 0.47398843930635837, - "acc_norm_stderr": 0.02688264343402289 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.027716661650194048, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.027716661650194048 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5233160621761658, - "acc_stderr": 0.03604513672442202, - "acc_norm": 0.5233160621761658, - "acc_norm_stderr": 0.03604513672442202 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5541284403669725, - "acc_stderr": 0.02131133500970857, - "acc_norm": 0.5541284403669725, - "acc_norm_stderr": 0.02131133500970857 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.0416345303130286, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.0416345303130286 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.434640522875817, - "acc_stderr": 0.028384256704883037, - "acc_norm": 0.434640522875817, - "acc_norm_stderr": 0.028384256704883037 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4144736842105263, - "acc_stderr": 0.04008973785779206, - "acc_norm": 0.4144736842105263, - "acc_norm_stderr": 0.04008973785779206 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3562091503267974, - "acc_stderr": 0.0193733324207245, - "acc_norm": 0.3562091503267974, - "acc_norm_stderr": 0.0193733324207245 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.34397163120567376, - "acc_stderr": 0.028338017428611317, - "acc_norm": 0.34397163120567376, - "acc_norm_stderr": 0.028338017428611317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.03275773486100999, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.03275773486100999 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2558659217877095, - "acc_stderr": 0.014593620923210756, - "acc_norm": 0.2558659217877095, - "acc_norm_stderr": 0.014593620923210756 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.40441176470588236, - "acc_stderr": 
0.029812630701569743, - "acc_norm": 0.40441176470588236, - "acc_norm_stderr": 0.029812630701569743 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.031976941187136725, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.031976941187136725 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5864978902953587, - "acc_stderr": 0.03205649904851858, - "acc_norm": 0.5864978902953587, - "acc_norm_stderr": 0.03205649904851858 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3683181225554107, - "acc_stderr": 0.01231940336956464, - "acc_norm": 0.3683181225554107, - "acc_norm_stderr": 0.01231940336956464 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.034849415144292316, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.034849415144292316 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.47878787878787876, - "acc_stderr": 0.03900828913737302, - "acc_norm": 0.47878787878787876, - "acc_norm_stderr": 0.03900828913737302 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2668298653610771, - "mc1_stderr": 0.015483691939237272, - "mc2": 0.4334291763920242, - "mc2_stderr": 0.014968924711902113 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5586854460093896, - "acc_stderr": 0.017021311671847474, - "acc_norm": 0.653755868544601, - "acc_norm_stderr": 0.016309259043203083 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - 
"harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/KoT-platypus2-13B", - "model_sha": "a5d295abdb2fca50971ee29b4db84f7565c67ab3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json b/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json deleted file mode 100644 index bc5364f9c0da90cfff08f46a2ba21922fae42eba..0000000000000000000000000000000000000000 --- a/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3225255972696246, - "acc_stderr": 0.013659980894277375, - "acc_norm": 0.38054607508532423, - "acc_norm_stderr": 0.014188277712349819 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38179645488946423, - "acc_stderr": 0.004848341560492151, - "acc_norm": 0.4963154750049791, - "acc_norm_stderr": 0.004989645929811438 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.037867207062342145, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.037867207062342145 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.04721188506097172, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.04721188506097172 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39208173690932313, - "acc_stderr": 0.01745852405014764, - "acc_norm": 0.39208173690932313, - "acc_norm_stderr": 0.01745852405014764 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354544, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354544 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2723404255319149, - "acc_stderr": 0.029101290698386705, - "acc_norm": 0.2723404255319149, - "acc_norm_stderr": 0.029101290698386705 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3253012048192771, - "acc_stderr": 0.03647168523683227, - "acc_norm": 0.3253012048192771, - "acc_norm_stderr": 0.03647168523683227 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.39228295819935693, - "acc_stderr": 0.027731258647011998, - "acc_norm": 0.39228295819935693, - "acc_norm_stderr": 0.027731258647011998 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.40358744394618834, - "acc_stderr": 0.03292802819330313, - "acc_norm": 0.40358744394618834, - "acc_norm_stderr": 0.03292802819330313 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4961832061068702, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.4961832061068702, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - 
"acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.0347327959083696, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.0347327959083696 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.43448275862068964, - "acc_stderr": 0.04130740879555497, - "acc_norm": 0.43448275862068964, - "acc_norm_stderr": 0.04130740879555497 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31794871794871793, - "acc_stderr": 0.02361088430892786, - "acc_norm": 0.31794871794871793, - "acc_norm_stderr": 0.02361088430892786 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.0471282125742677, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.0471282125742677 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.030903796952114485, - "acc_norm": 0.26108374384236455, - "acc_norm_stderr": 0.030903796952114485 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34516129032258064, - "acc_stderr": 0.027045746573534327, - "acc_norm": 0.34516129032258064, - "acc_norm_stderr": 0.027045746573534327 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5, - "acc_stderr": 0.03275608910402091, - "acc_norm": 0.5, - "acc_norm_stderr": 0.03275608910402091 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3433962264150943, - "acc_stderr": 0.029224526469124792, - "acc_norm": 0.3433962264150943, - "acc_norm_stderr": 0.029224526469124792 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.35454545454545455, - "acc_stderr": 0.04582004841505415, - "acc_norm": 0.35454545454545455, - "acc_norm_stderr": 0.04582004841505415 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24444444444444444, - "acc_stderr": 0.02620276653465215, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.02620276653465215 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.43781094527363185, - "acc_stderr": 0.035080801121998406, - "acc_norm": 0.43781094527363185, - "acc_norm_stderr": 0.035080801121998406 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267437, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267437 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24338624338624337, - "acc_stderr": 0.022101128787415426, - "acc_norm": 0.24338624338624337, - "acc_norm_stderr": 0.022101128787415426 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 
0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4161849710982659, - "acc_stderr": 0.02653818910470548, - "acc_norm": 0.4161849710982659, - "acc_norm_stderr": 0.02653818910470548 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.03680350371286461, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.03680350371286461 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.02716368603827123, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.02716368603827123 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.42487046632124353, - "acc_stderr": 0.0356747133521254, - "acc_norm": 0.42487046632124353, - "acc_norm_stderr": 0.0356747133521254 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3926605504587156, - "acc_stderr": 0.020937505161201093, - "acc_norm": 0.3926605504587156, - "acc_norm_stderr": 0.020937505161201093 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.038932596106046734, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.038932596106046734 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4084967320261438, - "acc_stderr": 0.028146405993096358, - "acc_norm": 0.4084967320261438, - "acc_norm_stderr": 0.028146405993096358 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.49586776859504134, - "acc_stderr": 0.04564198767432754, - "acc_norm": 0.49586776859504134, - "acc_norm_stderr": 0.04564198767432754 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3284313725490196, - "acc_stderr": 0.01899970738316266, - "acc_norm": 0.3284313725490196, - "acc_norm_stderr": 0.01899970738316266 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.026684564340460997, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460997 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.03236585252602157, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.03236585252602157 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2569832402234637, - "acc_stderr": 0.014614465821966344, - "acc_norm": 
0.2569832402234637, - "acc_norm_stderr": 0.014614465821966344 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41911764705882354, - "acc_stderr": 0.02997280717046463, - "acc_norm": 0.41911764705882354, - "acc_norm_stderr": 0.02997280717046463 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39183673469387753, - "acc_stderr": 0.031251275910891656, - "acc_norm": 0.39183673469387753, - "acc_norm_stderr": 0.031251275910891656 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.48945147679324896, - "acc_stderr": 0.032539983791662855, - "acc_norm": 0.48945147679324896, - "acc_norm_stderr": 0.032539983791662855 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32529335071707954, - "acc_stderr": 0.01196531153657153, - "acc_norm": 0.32529335071707954, - "acc_norm_stderr": 0.01196531153657153 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.033644872860882996, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.033644872860882996 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752332, - "mc2": 0.37686510476734664, - "mc2_stderr": 0.014752533377181794 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.596244131455399, - "acc_stderr": 0.01681925296969962, - "acc_norm": 0.6807511737089202, - "acc_norm_stderr": 0.015980636535168225 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - 
"harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/KoT-platypus2-7B", - "model_sha": "60e0201ec41121d82e85ae235d920c055bb5fb1f", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json b/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json deleted file mode 100644 index d84a58f85e399ed6a5fb5a28525467f1f0507b18..0000000000000000000000000000000000000000 --- a/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.37372013651877134, - "acc_stderr": 0.014137708601759091, - "acc_norm": 0.4308873720136519, - "acc_norm_stderr": 0.014471133392642475 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4080860386377216, - "acc_stderr": 0.004904747752286962, - "acc_norm": 0.5413264289982075, - "acc_norm_stderr": 0.0049727083696565425 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5242718446601942, - "acc_stderr": 0.049449010929737795, - "acc_norm": 0.5242718446601942, - "acc_norm_stderr": 0.049449010929737795 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5223499361430396, - "acc_stderr": 0.01786209177850786, - "acc_norm": 0.5223499361430396, - "acc_norm_stderr": 0.01786209177850786 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04244633238353229, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04244633238353229 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.03106898596312215, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.03106898596312215 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.03789134424611548, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.03789134424611548 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4437299035369775, - "acc_stderr": 0.028217683556652315, - "acc_norm": 0.4437299035369775, - 
"acc_norm_stderr": 0.028217683556652315 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4439461883408072, - "acc_stderr": 0.03334625674242728, - "acc_norm": 0.4439461883408072, - "acc_norm_stderr": 0.03334625674242728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952365, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952365 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.035578062450873145, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.035578062450873145 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.42758620689655175, - "acc_stderr": 0.041227371113703316, - "acc_norm": 0.42758620689655175, - "acc_norm_stderr": 0.041227371113703316 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.37815126050420167, - "acc_stderr": 0.031499305777849054, - "acc_norm": 0.37815126050420167, - "acc_norm_stderr": 0.031499305777849054 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4461538461538462, - "acc_stderr": 0.025203571773028333, - "acc_norm": 0.4461538461538462, - "acc_norm_stderr": 0.025203571773028333 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5, - "acc_stderr": 0.04833682445228318, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04833682445228318 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3497536945812808, - "acc_stderr": 0.03355400904969566, - "acc_norm": 0.3497536945812808, - "acc_norm_stderr": 0.03355400904969566 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43870967741935485, - "acc_stderr": 0.02822949732031722, - "acc_norm": 0.43870967741935485, - "acc_norm_stderr": 0.02822949732031722 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.03107502852650775, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.03107502852650775 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.44150943396226416, - "acc_stderr": 0.030561590426731833, - "acc_norm": 0.44150943396226416, - "acc_norm_stderr": 0.030561590426731833 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5, - "acc_stderr": 0.04789131426105757, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04789131426105757 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145654, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145654 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.48756218905472637, - "acc_stderr": 0.03534439848539579, - "acc_norm": 0.48756218905472637, - "acc_norm_stderr": 0.03534439848539579 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 
0.3872832369942196, - "acc_stderr": 0.03714325906302064, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302064 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.291005291005291, - "acc_stderr": 0.02339382650048487, - "acc_norm": 0.291005291005291, - "acc_norm_stderr": 0.02339382650048487 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.04032999053960718, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.04032999053960718 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.44508670520231214, - "acc_stderr": 0.02675625512966377, - "acc_norm": 0.44508670520231214, - "acc_norm_stderr": 0.02675625512966377 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.039015918258361836, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.039015918258361836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4691358024691358, - "acc_stderr": 0.027767689606833935, - "acc_norm": 0.4691358024691358, - "acc_norm_stderr": 0.027767689606833935 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5025906735751295, - "acc_stderr": 0.03608390745384488, - "acc_norm": 0.5025906735751295, - "acc_norm_stderr": 0.03608390745384488 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4935779816513762, - "acc_stderr": 0.021435554820013077, - "acc_norm": 0.4935779816513762, - "acc_norm_stderr": 0.021435554820013077 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.039325376803928704, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.039325376803928704 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.40522875816993464, - "acc_stderr": 0.02811092849280907, - "acc_norm": 0.40522875816993464, - "acc_norm_stderr": 0.02811092849280907 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6198347107438017, - "acc_stderr": 0.04431324501968432, - "acc_norm": 0.6198347107438017, - "acc_norm_stderr": 0.04431324501968432 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.39473684210526316, - "acc_stderr": 0.039777499346220734, - "acc_norm": 0.39473684210526316, - "acc_norm_stderr": 0.039777499346220734 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.33986928104575165, - "acc_stderr": 0.01916241858862356, - "acc_norm": 0.33986928104575165, - "acc_norm_stderr": 0.01916241858862356 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.027640120545169934, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.027640120545169934 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 
0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.030058202704309846, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.030058202704309846 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961443, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961443 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.28308823529411764, - "acc_stderr": 0.027365861131513805, - "acc_norm": 0.28308823529411764, - "acc_norm_stderr": 0.027365861131513805 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827424, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827424 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.30247718383311606, - "acc_stderr": 0.0117315242341657, - "acc_norm": 0.30247718383311606, - "acc_norm_stderr": 0.0117315242341657 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.45098039215686275, - "acc_stderr": 0.03492406104163614, - "acc_norm": 0.45098039215686275, - "acc_norm_stderr": 0.03492406104163614 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.03898531605579419, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.03898531605579419 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502332, - "mc2": 0.4522241098057631, - "mc2_stderr": 0.015289294572002421 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.431924882629108, - "acc_stderr": 0.016980177092572064, - "acc_norm": 0.5657276995305164, - "acc_norm_stderr": 0.016991041723268784 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - 
"harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/Korean-OpenOrca-13B", - "model_sha": "b12e0fc5f123688183ac97132bd8ac764aaa43e1", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json b/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json deleted file mode 100644 index 78a2e64e7e9a7cf0db7a51c4aa81f59ec874610a..0000000000000000000000000000000000000000 --- a/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3839590443686007, - "acc_stderr": 0.01421244498065189, - "acc_norm": 0.439419795221843, - "acc_norm_stderr": 0.014503747823580122 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40679147580163316, - "acc_stderr": 0.0049023140557255904, - "acc_norm": 0.5388368850826528, - "acc_norm_stderr": 0.00497470642843428 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4563106796116505, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.4563106796116505, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5159642401021711, - "acc_stderr": 0.01787084750608174, - "acc_norm": 0.5159642401021711, - "acc_norm_stderr": 0.01787084750608174 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.42962962962962964, - "acc_stderr": 0.04276349494376599, - "acc_norm": 0.42962962962962964, - "acc_norm_stderr": 0.04276349494376599 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - 
"acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39759036144578314, - "acc_stderr": 0.038099730845402184, - "acc_norm": 0.39759036144578314, - "acc_norm_stderr": 0.038099730845402184 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5176848874598071, - "acc_stderr": 0.02838032284907713, - "acc_norm": 0.5176848874598071, - "acc_norm_stderr": 0.02838032284907713 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.484304932735426, - "acc_stderr": 0.0335412657542081, - "acc_norm": 0.484304932735426, - "acc_norm_stderr": 0.0335412657542081 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553894, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553894 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.03560716516531061, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.03560716516531061 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4206896551724138, - "acc_stderr": 0.0411391498118926, - "acc_norm": 0.4206896551724138, - "acc_norm_stderr": 0.0411391498118926 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.043898699568087785, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.043898699568087785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.032339434681820885, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.032339434681820885 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.44358974358974357, - "acc_stderr": 0.02518914989476419, - "acc_norm": 0.44358974358974357, - "acc_norm_stderr": 0.02518914989476419 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760627, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4039408866995074, - "acc_stderr": 0.034524539038220385, - "acc_norm": 0.4039408866995074, - "acc_norm_stderr": 0.034524539038220385 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.02827241018621491, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.02827241018621491 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6709401709401709, - "acc_stderr": 0.03078232157768817, - "acc_norm": 0.6709401709401709, - "acc_norm_stderr": 0.03078232157768817 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4075471698113208, - "acc_stderr": 0.030242233800854498, - "acc_norm": 0.4075471698113208, - "acc_norm_stderr": 0.030242233800854498 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.04769300568972744, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.04769300568972744 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.028742040903948492, - 
"acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.028742040903948492 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.036313298039696525, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.036313298039696525 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.572139303482587, - "acc_stderr": 0.03498541988407795, - "acc_norm": 0.572139303482587, - "acc_norm_stderr": 0.03498541988407795 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.036563436533531585, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.036563436533531585 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.02300008685906864, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.02300008685906864 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3402777777777778, - "acc_stderr": 0.039621355734862175, - "acc_norm": 0.3402777777777778, - "acc_norm_stderr": 0.039621355734862175 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.67, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.67, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49710982658959535, - "acc_stderr": 0.026918645383239004, - "acc_norm": 0.49710982658959535, - "acc_norm_stderr": 0.026918645383239004 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.43558282208588955, - "acc_stderr": 0.03895632464138937, - "acc_norm": 0.43558282208588955, - "acc_norm_stderr": 0.03895632464138937 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.44753086419753085, - "acc_stderr": 0.0276671385694227, - "acc_norm": 0.44753086419753085, - "acc_norm_stderr": 0.0276671385694227 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5233160621761658, - "acc_stderr": 0.036045136724422014, - "acc_norm": 0.5233160621761658, - "acc_norm_stderr": 0.036045136724422014 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748142, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748142 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5211009174311927, - "acc_stderr": 0.02141822475426465, - "acc_norm": 0.5211009174311927, - "acc_norm_stderr": 0.02141822475426465 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.0416345303130286, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.0416345303130286 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.434640522875817, - "acc_stderr": 0.028384256704883037, - "acc_norm": 0.434640522875817, - "acc_norm_stderr": 0.028384256704883037 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5867768595041323, - "acc_stderr": 0.04495087843548408, - "acc_norm": 0.5867768595041323, - "acc_norm_stderr": 0.04495087843548408 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4934210526315789, - "acc_stderr": 0.040685900502249704, - "acc_norm": 0.4934210526315789, - "acc_norm_stderr": 0.040685900502249704 - }, 
- "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.019722058939618068, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.019722058939618068 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32978723404255317, - "acc_stderr": 0.028045946942042398, - "acc_norm": 0.32978723404255317, - "acc_norm_stderr": 0.028045946942042398 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.029520095697687754, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.029520095697687754 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4326530612244898, - "acc_stderr": 0.03171752824062664, - "acc_norm": 0.4326530612244898, - "acc_norm_stderr": 0.03171752824062664 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.35984354628422427, - "acc_stderr": 0.012258260483689803, - "acc_norm": 0.35984354628422427, - "acc_norm_stderr": 0.012258260483689803 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.03476099060501636, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.03476099060501636 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5212121212121212, - "acc_stderr": 0.03900828913737301, - "acc_norm": 0.5212121212121212, - "acc_norm_stderr": 0.03900828913737301 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26438188494492043, - "mc1_stderr": 0.015438211119522509, - "mc2": 0.43461012650741965, - "mc2_stderr": 0.015133199211121806 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3826291079812207, - "acc_stderr": 0.016660855482636353, - "acc_norm": 0.4671361502347418, - "acc_norm_stderr": 0.01710271614064038 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "kyujinpy/Kosy-Platypus2-13B", - "model_sha": "7d5af714d5429ed3496f73e8a44525bec4a73d20", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-03 09:21:53.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-03 09:21:53.json deleted file mode 100644 index f491129fc74703aaa7a35c2807db09daf1fe7657..0000000000000000000000000000000000000000 --- a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-03 09:21:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3225255972696246, - "acc_stderr": 0.013659980894277373, - "acc_norm": 0.38054607508532423, - "acc_norm_stderr": 0.014188277712349814 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4010157339175463, - "acc_stderr": 0.004891025533633028, - "acc_norm": 0.5273849830711014, - "acc_norm_stderr": 0.004982291744069926 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.36257309941520466, - "acc_stderr": 0.0368713061556206, - "acc_norm": 0.36257309941520466, - "acc_norm_stderr": 0.0368713061556206 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.21359223300970873, - "acc_stderr": 0.040580420156460344, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.040580420156460344 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.34099616858237547, - "acc_stderr": 0.016951781383223313, - "acc_norm": 
0.34099616858237547, - "acc_norm_stderr": 0.016951781383223313 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04244633238353228, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04244633238353228 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2170212765957447, - "acc_stderr": 0.02694748312149625, - "acc_norm": 0.2170212765957447, - "acc_norm_stderr": 0.02694748312149625 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.22289156626506024, - "acc_stderr": 0.03240004825594688, - "acc_norm": 0.22289156626506024, - "acc_norm_stderr": 0.03240004825594688 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.31511254019292606, - "acc_stderr": 0.026385273703464496, - "acc_norm": 0.31511254019292606, - "acc_norm_stderr": 0.026385273703464496 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2914798206278027, - "acc_stderr": 0.030500283176545906, - "acc_norm": 0.2914798206278027, - "acc_norm_stderr": 0.030500283176545906 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3435114503816794, - "acc_stderr": 0.041649760719448786, - "acc_norm": 0.3435114503816794, - "acc_norm_stderr": 0.041649760719448786 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.03074630074212451, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.03074630074212451 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03960933549451207, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03960933549451207 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416544, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416544 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24789915966386555, - "acc_stderr": 0.028047967224176892, - "acc_norm": 0.24789915966386555, - "acc_norm_stderr": 0.028047967224176892 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.19743589743589743, - "acc_stderr": 0.02018264696867484, - "acc_norm": 0.19743589743589743, - "acc_norm_stderr": 0.02018264696867484 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.031947400722655395, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.031947400722655395 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.26129032258064516, - "acc_stderr": 0.024993053397764815, - "acc_norm": 0.26129032258064516, - "acc_norm_stderr": 0.024993053397764815 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.34615384615384615, - "acc_stderr": 0.0311669573672359, - "acc_norm": 0.34615384615384615, - "acc_norm_stderr": 0.0311669573672359 - }, 
- "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2830188679245283, - "acc_stderr": 0.027724236492700897, - "acc_norm": 0.2830188679245283, - "acc_norm_stderr": 0.027724236492700897 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.026593939101844072, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.026593939101844072 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2835820895522388, - "acc_stderr": 0.03187187537919798, - "acc_norm": 0.2835820895522388, - "acc_norm_stderr": 0.03187187537919798 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113932, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113932 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.037161774375660164, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.037161774375660164 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.14, - "acc_stderr": 0.03487350880197771, - "acc_norm": 0.14, - "acc_norm_stderr": 0.03487350880197771 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.023948512905468376, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.023948512905468376 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2822085889570552, - "acc_stderr": 0.03536117886664743, - "acc_norm": 0.2822085889570552, - "acc_norm_stderr": 0.03536117886664743 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32098765432098764, - "acc_stderr": 0.02597656601086274, - "acc_norm": 0.32098765432098764, - "acc_norm_stderr": 0.02597656601086274 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565318, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565318 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26422018348623855, - "acc_stderr": 0.018904164171510196, - "acc_norm": 0.26422018348623855, - "acc_norm_stderr": 0.018904164171510196 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.19047619047619047, - "acc_stderr": 0.03512207412302052, - "acc_norm": 0.19047619047619047, - "acc_norm_stderr": 0.03512207412302052 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.02573885479781873, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.02573885479781873 - }, - 
"harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03459777606810537, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03459777606810537 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2875816993464052, - "acc_stderr": 0.018311653053648222, - "acc_norm": 0.2875816993464052, - "acc_norm_stderr": 0.018311653053648222 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.026469036818590634, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.026469036818590634 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.030225226160012393, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.030225226160012393 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476786, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476786 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.18382352941176472, - "acc_stderr": 0.023529242185193106, - "acc_norm": 0.18382352941176472, - "acc_norm_stderr": 0.023529242185193106 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.02866685779027465, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.02866685779027465 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.31223628691983124, - "acc_stderr": 0.03016513786784701, - "acc_norm": 0.31223628691983124, - "acc_norm_stderr": 0.03016513786784701 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27183833116036504, - "acc_stderr": 0.011363135278651411, - "acc_norm": 0.27183833116036504, - "acc_norm_stderr": 0.011363135278651411 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.031145570659486782, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.031145570659486782 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624336, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624336 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842888, - "mc2": 0.4179272559309221, - "mc2_stderr": 0.01511034311354295 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3227699530516432, - "acc_stderr": 0.016026912972697233, - "acc_norm": 0.3884976525821596, - "acc_norm_stderr": 0.01670815454631332 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - 
"harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/llama2-ko-chang-instruct-chat", - "model_sha": "c4acb327349f29d8106c1d22f8d8feed643fe9b7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-05 04:26:03.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-05 04:26:03.json deleted file mode 100644 index 8eabf4258b11580288fd1a4d2ba05de434f1ec9f..0000000000000000000000000000000000000000 --- a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-05 04:26:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3199658703071672, - "acc_stderr": 0.013631345807016195, - "acc_norm": 0.386518771331058, - "acc_norm_stderr": 0.014230084761910474 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3990240987851026, - "acc_stderr": 0.004886969266944273, - "acc_norm": 0.528281218880701, - "acc_norm_stderr": 0.004981793089848266 - }, 
- "harness|ko_mmlu_world_religions|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.037867207062342145, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.037867207062342145 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.04185832598928315, - "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.04185832598928315 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.36270753512132825, - "acc_stderr": 0.017192708674602302, - "acc_norm": 0.36270753512132825, - "acc_norm_stderr": 0.017192708674602302 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.042446332383532286, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.042446332383532286 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102967, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102967 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 0.03384429155233136, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 0.03384429155233136 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3247588424437299, - "acc_stderr": 0.026596782287697043, - "acc_norm": 0.3247588424437299, - "acc_norm_stderr": 0.026596782287697043 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.33183856502242154, - "acc_stderr": 0.03160295143776679, - "acc_norm": 0.33183856502242154, - "acc_norm_stderr": 0.03160295143776679 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4198473282442748, - "acc_stderr": 0.04328577215262971, - "acc_norm": 0.4198473282442748, - "acc_norm_stderr": 0.04328577215262971 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3383838383838384, - "acc_stderr": 0.033711241426263035, - "acc_norm": 0.3383838383838384, - "acc_norm_stderr": 0.033711241426263035 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.36551724137931035, - "acc_stderr": 0.04013124195424385, - "acc_norm": 0.36551724137931035, - "acc_norm_stderr": 0.04013124195424385 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.036186648199362466, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.036186648199362466 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.24358974358974358, - "acc_stderr": 0.0217637336841739, - "acc_norm": 0.24358974358974358, - "acc_norm_stderr": 0.0217637336841739 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.04489931073591312, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.04489931073591312 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 
0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2903225806451613, - "acc_stderr": 0.025822106119415895, - "acc_norm": 0.2903225806451613, - "acc_norm_stderr": 0.025822106119415895 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4017094017094017, - "acc_stderr": 0.03211693751051622, - "acc_norm": 0.4017094017094017, - "acc_norm_stderr": 0.03211693751051622 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3320754716981132, - "acc_stderr": 0.028985455652334395, - "acc_norm": 0.3320754716981132, - "acc_norm_stderr": 0.028985455652334395 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425463, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425463 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371216, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371216 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.31840796019900497, - "acc_stderr": 0.03294118479054095, - "acc_norm": 0.31840796019900497, - "acc_norm_stderr": 0.03294118479054095 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.03476599607516478, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.03476599607516478 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.24305555555555555, - "acc_stderr": 0.03586879280080343, - "acc_norm": 0.24305555555555555, - "acc_norm_stderr": 0.03586879280080343 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3265895953757225, - "acc_stderr": 0.02524826477424284, - "acc_norm": 0.3265895953757225, - "acc_norm_stderr": 0.02524826477424284 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.03487825168497892, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.03487825168497892 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.33641975308641975, - "acc_stderr": 0.026289734945952926, - "acc_norm": 0.33641975308641975, - "acc_norm_stderr": 0.026289734945952926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.033248379397581594, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.033248379397581594 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 
0.3559633027522936, - "acc_stderr": 0.020528559278244218, - "acc_norm": 0.3559633027522936, - "acc_norm_stderr": 0.020528559278244218 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.03395490020856111, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.03395490020856111 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.026787453111906532, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.026787453111906532 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.48760330578512395, - "acc_stderr": 0.045629515481807666, - "acc_norm": 0.48760330578512395, - "acc_norm_stderr": 0.045629515481807666 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.30718954248366015, - "acc_stderr": 0.018663359671463663, - "acc_norm": 0.30718954248366015, - "acc_norm_stderr": 0.018663359671463663 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307857, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307857 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.33035714285714285, - "acc_stderr": 0.044642857142857116, - "acc_norm": 0.33035714285714285, - "acc_norm_stderr": 0.044642857142857116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2175925925925926, - "acc_stderr": 0.028139689444859672, - "acc_norm": 0.2175925925925926, - "acc_norm_stderr": 0.028139689444859672 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23910614525139665, - "acc_stderr": 0.014265554192331165, - "acc_norm": 0.23910614525139665, - "acc_norm_stderr": 0.014265554192331165 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.25735294117647056, - "acc_stderr": 0.026556519470041513, - "acc_norm": 0.25735294117647056, - "acc_norm_stderr": 0.026556519470041513 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3924050632911392, - "acc_stderr": 0.031784718745647283, - "acc_norm": 0.3924050632911392, - "acc_norm_stderr": 0.031784718745647283 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28552803129074317, - "acc_stderr": 0.011535751586665668, - "acc_norm": 0.28552803129074317, - "acc_norm_stderr": 0.011535751586665668 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.03283472056108567, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.03283472056108567 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - 
"harness|ko_truthfulqa_mc|0": { - "mc1": 0.26193390452876375, - "mc1_stderr": 0.01539211880501501, - "mc2": 0.41139003440232036, - "mc2_stderr": 0.015295007853319996 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3028169014084507, - "acc_stderr": 0.01575065796584493, - "acc_norm": 0.3532863849765258, - "acc_norm_stderr": 0.016385310378526204 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/llama2-ko-chang-instruct-chat", - "model_sha": "53faa94566bdad4d1ff0611ca8c78a7aabda8cee", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-07 09:27:18.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-07 09:27:18.json deleted file mode 100644 
index 986deea778bb9b99ba36fd7bac6a21d27d823d5a..0000000000000000000000000000000000000000 --- a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-07 09:27:18.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32593856655290104, - "acc_stderr": 0.013697432466693239, - "acc_norm": 0.3839590443686007, - "acc_norm_stderr": 0.01421244498065189 - }, - "harness|ko_hellaswag|10": { - "acc": 0.401911969727146, - "acc_stderr": 0.00489282341554655, - "acc_norm": 0.5347540330611432, - "acc_norm_stderr": 0.0049777130738993145 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4093567251461988, - "acc_stderr": 0.037712831076265434, - "acc_norm": 0.4093567251461988, - "acc_norm_stderr": 0.037712831076265434 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.22330097087378642, - "acc_stderr": 0.04123553189891431, - "acc_norm": 0.22330097087378642, - "acc_norm_stderr": 0.04123553189891431 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.35887611749680715, - "acc_stderr": 0.017152991797501342, - "acc_norm": 0.35887611749680715, - "acc_norm_stderr": 0.017152991797501342 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102953, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102953 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3215434083601286, - "acc_stderr": 0.026527724079528872, - "acc_norm": 0.3215434083601286, - "acc_norm_stderr": 0.026527724079528872 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4122137404580153, - "acc_stderr": 0.04317171194870255, - "acc_norm": 0.4122137404580153, - "acc_norm_stderr": 0.04317171194870255 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.03318477333845332, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.03318477333845332 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138623, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138623 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.021362027725222724, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.021362027725222724 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.043733130409147614, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.043733130409147614 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293752, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293752 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2645161290322581, - "acc_stderr": 0.025091892378859275, - "acc_norm": 0.2645161290322581, - "acc_norm_stderr": 0.025091892378859275 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.39316239316239315, - "acc_stderr": 0.03199957924651047, - "acc_norm": 0.39316239316239315, - "acc_norm_stderr": 0.03199957924651047 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3090909090909091, - "acc_stderr": 0.044262946482000985, - "acc_norm": 0.3090909090909091, - "acc_norm_stderr": 0.044262946482000985 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073824, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073824 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969654, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969654 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3781094527363184, - "acc_stderr": 0.03428867848778658, - "acc_norm": 0.3781094527363184, - "acc_norm_stderr": 0.03428867848778658 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708617, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708617 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.29190751445086704, - "acc_stderr": 0.024476994076247316, - "acc_norm": 0.29190751445086704, - "acc_norm_stderr": 0.024476994076247316 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.026041766202717163, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.026041766202717163 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - 
"acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3174311926605505, - "acc_stderr": 0.0199571521984605, - "acc_norm": 0.3174311926605505, - "acc_norm_stderr": 0.0199571521984605 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.03268454013011744, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.03268454013011744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.027684181883302898, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.027684181883302898 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.35537190082644626, - "acc_stderr": 0.04369236326573981, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573981 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.018403415710109772, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.018403415710109772 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.025892151156709405, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.025892151156709405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.04432804055291519, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.04432804055291519 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.22685185185185186, - "acc_stderr": 0.028561650102422256, - "acc_norm": 0.22685185185185186, - "acc_norm_stderr": 0.028561650102422256 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2346368715083799, - "acc_stderr": 0.014173044098303675, - "acc_norm": 0.2346368715083799, - "acc_norm_stderr": 0.014173044098303675 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.026799562024887688, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.026799562024887688 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3836734693877551, - "acc_stderr": 0.031130880396235936, - "acc_norm": 0.3836734693877551, - "acc_norm_stderr": 0.031130880396235936 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.43037974683544306, - "acc_stderr": 0.032230171959375976, - "acc_norm": 0.43037974683544306, - "acc_norm_stderr": 0.032230171959375976 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 
0.2796610169491525, - "acc_stderr": 0.011463397393861964, - "acc_norm": 0.2796610169491525, - "acc_norm_stderr": 0.011463397393861964 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03308611113236437, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03308611113236437 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.035243908445117836, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.035243908445117836 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.015616518497219374, - "mc2": 0.4290947920734499, - "mc2_stderr": 0.015338103665717853 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.30633802816901406, - "acc_stderr": 0.015801911286714723, - "acc_norm": 0.357981220657277, - "acc_norm_stderr": 0.01643384581482908 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - 
"harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/llama2-ko-chang-instruct-chat", - "model_sha": "77bd1c3d0a5997aa8e406eb689ae786a3178b274", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-09 07:25:40.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-09 07:25:40.json deleted file mode 100644 index e61a4e5d2b3c0f2819ddabd15023845a0c418ec2..0000000000000000000000000000000000000000 --- a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-09 07:25:40.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32593856655290104, - "acc_stderr": 0.013697432466693239, - "acc_norm": 0.3839590443686007, - "acc_norm_stderr": 0.01421244498065189 - }, - "harness|ko_hellaswag|10": { - "acc": 0.401911969727146, - "acc_stderr": 0.00489282341554655, - "acc_norm": 0.5347540330611432, - "acc_norm_stderr": 0.0049777130738993145 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4093567251461988, - "acc_stderr": 0.037712831076265434, - "acc_norm": 0.4093567251461988, - "acc_norm_stderr": 0.037712831076265434 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.22330097087378642, - "acc_stderr": 0.04123553189891431, - "acc_norm": 0.22330097087378642, - "acc_norm_stderr": 0.04123553189891431 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.35887611749680715, - "acc_stderr": 0.017152991797501342, - "acc_norm": 0.35887611749680715, - "acc_norm_stderr": 0.017152991797501342 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102953, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102953 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3215434083601286, - "acc_stderr": 0.026527724079528872, - "acc_norm": 0.3215434083601286, - "acc_norm_stderr": 0.026527724079528872 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3721973094170404, - "acc_stderr": 0.03244305283008731, - "acc_norm": 0.3721973094170404, - "acc_norm_stderr": 0.03244305283008731 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4122137404580153, - "acc_stderr": 0.04317171194870255, - "acc_norm": 0.4122137404580153, - "acc_norm_stderr": 0.04317171194870255 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.03318477333845332, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.03318477333845332 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138623, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138623 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 
0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.030176808288974337, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.030176808288974337 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23076923076923078, - "acc_stderr": 0.021362027725222724, - "acc_norm": 0.23076923076923078, - "acc_norm_stderr": 0.021362027725222724 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.043733130409147614, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.043733130409147614 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.03108982600293752, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.03108982600293752 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2645161290322581, - "acc_stderr": 0.025091892378859275, - "acc_norm": 0.2645161290322581, - "acc_norm_stderr": 0.025091892378859275 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.39316239316239315, - "acc_stderr": 0.03199957924651047, - "acc_norm": 0.39316239316239315, - "acc_norm_stderr": 0.03199957924651047 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3090909090909091, - "acc_stderr": 0.044262946482000985, - "acc_norm": 0.3090909090909091, - "acc_norm_stderr": 0.044262946482000985 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073824, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073824 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969654, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969654 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3781094527363184, - "acc_stderr": 0.03428867848778658, - "acc_norm": 0.3781094527363184, - "acc_norm_stderr": 0.03428867848778658 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.26455026455026454, - "acc_stderr": 0.022717467897708617, - "acc_norm": 0.26455026455026454, - "acc_norm_stderr": 0.022717467897708617 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774709, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774709 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.29190751445086704, - "acc_stderr": 
0.024476994076247316, - "acc_norm": 0.29190751445086704, - "acc_norm_stderr": 0.024476994076247316 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.026041766202717163, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.026041766202717163 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3174311926605505, - "acc_stderr": 0.0199571521984605, - "acc_norm": 0.3174311926605505, - "acc_norm_stderr": 0.0199571521984605 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15873015873015872, - "acc_stderr": 0.03268454013011744, - "acc_norm": 0.15873015873015872, - "acc_norm_stderr": 0.03268454013011744 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.027684181883302898, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.027684181883302898 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.35537190082644626, - "acc_stderr": 0.04369236326573981, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573981 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.018403415710109772, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.018403415710109772 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.025892151156709405, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.025892151156709405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.04432804055291519, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.04432804055291519 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.22685185185185186, - "acc_stderr": 0.028561650102422256, - "acc_norm": 0.22685185185185186, - "acc_norm_stderr": 0.028561650102422256 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2346368715083799, - "acc_stderr": 0.014173044098303675, - "acc_norm": 0.2346368715083799, - "acc_norm_stderr": 0.014173044098303675 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 
0.026799562024887688, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.026799562024887688 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3836734693877551, - "acc_stderr": 0.031130880396235936, - "acc_norm": 0.3836734693877551, - "acc_norm_stderr": 0.031130880396235936 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.43037974683544306, - "acc_stderr": 0.032230171959375976, - "acc_norm": 0.43037974683544306, - "acc_norm_stderr": 0.032230171959375976 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2796610169491525, - "acc_stderr": 0.011463397393861964, - "acc_norm": 0.2796610169491525, - "acc_norm_stderr": 0.011463397393861964 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03308611113236437, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03308611113236437 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.035243908445117836, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.035243908445117836 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2741738066095471, - "mc1_stderr": 0.015616518497219374, - "mc2": 0.4290947920734499, - "mc2_stderr": 0.015338103665717853 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.30633802816901406, - "acc_stderr": 0.015801911286714723, - "acc_norm": 0.3568075117370892, - "acc_norm_stderr": 0.016421873267918906 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - 
"harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/llama2-ko-chang-instruct-chat", - "model_sha": "27d6cedd2e8b77997e17ce92467b20b191dbdbef", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json deleted file mode 100644 index 5437c1d6d18a5507e93ddb1248eacf770211cbec..0000000000000000000000000000000000000000 --- a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3447098976109215, - "acc_stderr": 0.013888816286782114, - "acc_norm": 0.39419795221843, - "acc_norm_stderr": 0.014280522667467327 - }, - "harness|ko_hellaswag|10": { - "acc": 0.39852619000199163, - "acc_stderr": 0.004885942040894556, - "acc_norm": 0.5248954391555467, - "acc_norm_stderr": 0.0049835924109341715 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38596491228070173, - "acc_stderr": 0.03733756969066164, - "acc_norm": 0.38596491228070173, - "acc_norm_stderr": 0.03733756969066164 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.23300970873786409, - "acc_stderr": 0.04185832598928315, - "acc_norm": 0.23300970873786409, - "acc_norm_stderr": 0.04185832598928315 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.37292464878671777, - "acc_stderr": 0.017292868269453924, - "acc_norm": 0.37292464878671777, - "acc_norm_stderr": 0.017292868269453924 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742399, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.225531914893617, - "acc_stderr": 0.02732107841738753, - "acc_norm": 0.225531914893617, - "acc_norm_stderr": 0.02732107841738753 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.03208284450356365, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.03208284450356365 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33762057877813506, - "acc_stderr": 0.026858825879488558, - "acc_norm": 0.33762057877813506, - "acc_norm_stderr": 0.026858825879488558 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.33183856502242154, - "acc_stderr": 0.03160295143776679, - "acc_norm": 0.33183856502242154, - "acc_norm_stderr": 0.03160295143776679 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.35877862595419846, - "acc_stderr": 0.04206739313864908, - "acc_norm": 0.35877862595419846, - "acc_norm_stderr": 0.04206739313864908 - }, - 
"harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2828282828282828, - "acc_stderr": 0.03208779558786751, - "acc_norm": 0.2828282828282828, - "acc_norm_stderr": 0.03208779558786751 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.039966295748767186, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.039966295748767186 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2605042016806723, - "acc_stderr": 0.028510251512341926, - "acc_norm": 0.2605042016806723, - "acc_norm_stderr": 0.028510251512341926 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.20256410256410257, - "acc_stderr": 0.020377660970371372, - "acc_norm": 0.20256410256410257, - "acc_norm_stderr": 0.020377660970371372 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.22660098522167488, - "acc_stderr": 0.029454863835292982, - "acc_norm": 0.22660098522167488, - "acc_norm_stderr": 0.029454863835292982 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.267741935483871, - "acc_stderr": 0.025189006660212385, - "acc_norm": 0.267741935483871, - "acc_norm_stderr": 0.025189006660212385 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.38461538461538464, - "acc_stderr": 0.03187195347942466, - "acc_norm": 0.38461538461538464, - "acc_norm_stderr": 0.03187195347942466 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.02700876609070809, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 0.02700876609070809 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.026067159222275794, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.026067159222275794 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.35323383084577115, - "acc_stderr": 0.03379790611796776, - "acc_norm": 0.35323383084577115, - "acc_norm_stderr": 0.03379790611796776 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889904, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889904 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.020842290930114662, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.020842290930114662 - 
}, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.025070713719153186, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.025070713719153186 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26993865030674846, - "acc_stderr": 0.034878251684978906, - "acc_norm": 0.26993865030674846, - "acc_norm_stderr": 0.034878251684978906 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.02640614597362566, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.02640614597362566 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3005181347150259, - "acc_stderr": 0.033088185944157494, - "acc_norm": 0.3005181347150259, - "acc_norm_stderr": 0.033088185944157494 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29357798165137616, - "acc_stderr": 0.019525151122639667, - "acc_norm": 0.29357798165137616, - "acc_norm_stderr": 0.019525151122639667 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.026925654653615693, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.026925654653615693 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4214876033057851, - "acc_stderr": 0.045077322787750944, - "acc_norm": 0.4214876033057851, - "acc_norm_stderr": 0.045077322787750944 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19078947368421054, - "acc_stderr": 0.031975658210325, - "acc_norm": 0.19078947368421054, - "acc_norm_stderr": 0.031975658210325 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.018342529845275908, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.018342529845275908 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25886524822695034, - "acc_stderr": 0.026129572527180848, - "acc_norm": 0.25886524822695034, - "acc_norm_stderr": 0.026129572527180848 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.22685185185185186, - "acc_stderr": 0.028561650102422273, - "acc_norm": 0.22685185185185186, - "acc_norm_stderr": 0.028561650102422273 - }, - "harness|ko_mmlu_moral_scenarios|5": { 
- "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.30514705882352944, - "acc_stderr": 0.027971541370170605, - "acc_norm": 0.30514705882352944, - "acc_norm_stderr": 0.027971541370170605 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39183673469387753, - "acc_stderr": 0.03125127591089165, - "acc_norm": 0.39183673469387753, - "acc_norm_stderr": 0.03125127591089165 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3459915611814346, - "acc_stderr": 0.03096481058878671, - "acc_norm": 0.3459915611814346, - "acc_norm_stderr": 0.03096481058878671 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.273142112125163, - "acc_stderr": 0.01138015056783041, - "acc_norm": 0.273142112125163, - "acc_norm_stderr": 0.01138015056783041 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.03283472056108567, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.03283472056108567 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.296969696969697, - "acc_stderr": 0.03567969772268048, - "acc_norm": 0.296969696969697, - "acc_norm_stderr": 0.03567969772268048 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842885, - "mc2": 0.4212326635036667, - "mc2_stderr": 0.015192123492522393 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.30164319248826293, - "acc_stderr": 0.015733330645500615, - "acc_norm": 0.3591549295774648, - "acc_norm_stderr": 0.016445711213506777 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/llama2-ko-chang-instruct-chat", - "model_sha": "eaba470f33eb377cb27696dbc1f9a76fc03d4fe3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json b/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json deleted file mode 100644 index dcd621e3b96b64be7fc3487e7c2c544f309a22f8..0000000000000000000000000000000000000000 --- a/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2986348122866894, - "acc_stderr": 0.013374078615068756, - "acc_norm": 0.34897610921501704, - "acc_norm_stderr": 0.013928933461382497 - }, - "harness|ko_hellaswag|10": { - "acc": 0.4010157339175463, - "acc_stderr": 0.004891025533633027, - "acc_norm": 0.527185819557857, - "acc_norm_stderr": 0.004982400368939667 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393161, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393161 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26947637292464877, - "acc_stderr": 0.01586624307321506, - "acc_norm": 0.26947637292464877, - "acc_norm_stderr": 0.01586624307321506 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.040943762699967946, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.040943762699967946 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621503, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621503 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.0281854413012341, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.0281854413012341 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.033293941190735296, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.033293941190735296 
- }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3183279742765273, - "acc_stderr": 0.026457225067811025, - "acc_norm": 0.3183279742765273, - "acc_norm_stderr": 0.026457225067811025 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.0426073515764456, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.0426073515764456 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.03780019230438015, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.03780019230438015 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171451, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171451 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2184873949579832, - "acc_stderr": 0.02684151432295893, - "acc_norm": 0.2184873949579832, - "acc_norm_stderr": 0.02684151432295893 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.04236511258094631, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.04236511258094631 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.267741935483871, - "acc_stderr": 0.025189006660212385, - "acc_norm": 0.267741935483871, - "acc_norm_stderr": 0.025189006660212385 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2606837606837607, - "acc_stderr": 0.02876034895652341, - "acc_norm": 0.2606837606837607, - "acc_norm_stderr": 0.02876034895652341 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22264150943396227, - "acc_stderr": 0.025604233470899105, - "acc_norm": 0.22264150943396227, - "acc_norm_stderr": 0.025604233470899105 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.04122066502878285, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.04122066502878285 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.02659393910184408, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.02659393910184408 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119995, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119995 - }, - 
"harness|ko_mmlu_sociology|5": { - "acc": 0.20398009950248755, - "acc_stderr": 0.02849317624532609, - "acc_norm": 0.20398009950248755, - "acc_norm_stderr": 0.02849317624532609 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23121387283236994, - "acc_stderr": 0.03214737302029469, - "acc_norm": 0.23121387283236994, - "acc_norm_stderr": 0.03214737302029469 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.023266512213730575, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.023266512213730575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.034765901043041336, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.034765901043041336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.036845294917747094, - "acc_norm": 0.16, - "acc_norm_stderr": 0.036845294917747094 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.02361867831006937, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.02361867831006937 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.036230899157241474, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.036230899157241474 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2839506172839506, - "acc_stderr": 0.02508947852376513, - "acc_norm": 0.2839506172839506, - "acc_norm_stderr": 0.02508947852376513 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23834196891191708, - "acc_stderr": 0.030748905363909895, - "acc_norm": 0.23834196891191708, - "acc_norm_stderr": 0.030748905363909895 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24587155963302754, - "acc_stderr": 0.018461940968708457, - "acc_norm": 0.24587155963302754, - "acc_norm_stderr": 0.018461940968708457 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.03455071019102148, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.03455071019102148 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.024848018263875195, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.024848018263875195 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.33884297520661155, - "acc_stderr": 0.04320767807536669, - "acc_norm": 0.33884297520661155, - "acc_norm_stderr": 0.04320767807536669 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.017740899509177788, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.017740899509177788 - }, - "harness|ko_mmlu_professional_accounting|5": { - 
"acc": 0.19148936170212766, - "acc_stderr": 0.023472645247949425, - "acc_norm": 0.19148936170212766, - "acc_norm_stderr": 0.023472645247949425 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.0449394906861354, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.0449394906861354 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093936, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093936 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24581005586592178, - "acc_stderr": 0.014400296429225606, - "acc_norm": 0.24581005586592178, - "acc_norm_stderr": 0.014400296429225606 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2757352941176471, - "acc_stderr": 0.027146271936625162, - "acc_norm": 0.2757352941176471, - "acc_norm_stderr": 0.027146271936625162 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24489795918367346, - "acc_stderr": 0.027529637440174934, - "acc_norm": 0.24489795918367346, - "acc_norm_stderr": 0.027529637440174934 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.28270042194092826, - "acc_stderr": 0.029312814153955914, - "acc_norm": 0.28270042194092826, - "acc_norm_stderr": 0.029312814153955914 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2542372881355932, - "acc_stderr": 0.011121129007840664, - "acc_norm": 0.2542372881355932, - "acc_norm_stderr": 0.011121129007840664 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.03019028245350195, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.03019028245350195 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2839657282741738, - "mc1_stderr": 0.01578537085839671, - "mc2": 0.4444330897605926, - "mc2_stderr": 0.015483222855074748 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5176056338028169, - "acc_stderr": 0.017129150724246804, - "acc_norm": 0.5727699530516432, - "acc_norm_stderr": 0.016957281432341424 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - 
"harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lcw99/polyglot-ko-12.8b-chang-instruct-chat", - "model_sha": "a16de096eb135e66b90314e5ab84116c9f0f9d1b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json b/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json deleted file mode 100644 index a27ec66229dcc209e0f9543dc8969288f582c4fd..0000000000000000000000000000000000000000 --- a/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19027303754266212, - "acc_stderr": 0.011470424179225702, - "acc_norm": 0.2363481228668942, - "acc_norm_stderr": 0.012414960524301832 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2703644692292372, - "acc_stderr": 0.004432403734882273, - "acc_norm": 0.2969527982473611, - "acc_norm_stderr": 0.004559817589182076 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1650485436893204, - "acc_stderr": 0.03675668832233188, - "acc_norm": 0.1650485436893204, - "acc_norm_stderr": 0.03675668832233188 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.01598281477469563, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.01598281477469563 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.03972552884785137, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.03972552884785137 - }, - 
"harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610337, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610337 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370519, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370519 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.026003301117885142, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.026003301117885142 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.29596412556053814, - "acc_stderr": 0.030636591348699817, - "acc_norm": 0.29596412556053814, - "acc_norm_stderr": 0.030636591348699817 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.03768335959728744, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.03768335959728744 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.20202020202020202, - "acc_stderr": 0.028606204289229872, - "acc_norm": 0.20202020202020202, - "acc_norm_stderr": 0.028606204289229872 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135303, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135303 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416542, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416542 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2773109243697479, - "acc_stderr": 0.02907937453948001, - "acc_norm": 0.2773109243697479, - "acc_norm_stderr": 0.02907937453948001 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2743589743589744, - "acc_stderr": 0.02262276576749322, - "acc_norm": 0.2743589743589744, - "acc_norm_stderr": 0.02262276576749322 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421296, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421296 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2037037037037037, - "acc_stderr": 0.038935425188248475, - "acc_norm": 0.2037037037037037, - "acc_norm_stderr": 0.038935425188248475 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.031947400722655395, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.031947400722655395 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.26129032258064516, - "acc_stderr": 0.02499305339776482, - "acc_norm": 0.26129032258064516, - "acc_norm_stderr": 0.02499305339776482 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.32905982905982906, - "acc_stderr": 0.030782321577688163, - "acc_norm": 0.32905982905982906, - "acc_norm_stderr": 0.030782321577688163 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.22264150943396227, - "acc_stderr": 0.025604233470899105, - "acc_norm": 0.22264150943396227, - "acc_norm_stderr": 0.025604233470899105 - }, - "harness|ko_mmlu_public_relations|5": { - 
"acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.027738969632176088, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.027738969632176088 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916718, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916718 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.17341040462427745, - "acc_stderr": 0.02886810787497064, - "acc_norm": 0.17341040462427745, - "acc_norm_stderr": 0.02886810787497064 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2275132275132275, - "acc_stderr": 0.021591269407823774, - "acc_norm": 0.2275132275132275, - "acc_norm_stderr": 0.021591269407823774 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757177, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757177 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.27607361963190186, - "acc_stderr": 0.0351238528370505, - "acc_norm": 0.27607361963190186, - "acc_norm_stderr": 0.0351238528370505 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.024383665531035457, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.024383665531035457 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.19689119170984457, - "acc_stderr": 0.028697873971860695, - "acc_norm": 0.19689119170984457, - "acc_norm_stderr": 0.028697873971860695 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.24954128440366974, - "acc_stderr": 0.018553897629501614, - "acc_norm": 0.24954128440366974, - "acc_norm_stderr": 0.018553897629501614 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.12698412698412698, - "acc_stderr": 0.029780417522688434, - "acc_norm": 0.12698412698412698, - "acc_norm_stderr": 0.029780417522688434 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.023929155517351294, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.023929155517351294 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2809917355371901, - 
"acc_stderr": 0.04103203830514512, - "acc_norm": 0.2809917355371901, - "acc_norm_stderr": 0.04103203830514512 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.19078947368421054, - "acc_stderr": 0.03197565821032501, - "acc_norm": 0.19078947368421054, - "acc_norm_stderr": 0.03197565821032501 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2369281045751634, - "acc_stderr": 0.017201662169789796, - "acc_norm": 0.2369281045751634, - "acc_norm_stderr": 0.017201662169789796 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.02624492034984301, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.02624492034984301 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044792, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044792 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.44907407407407407, - "acc_stderr": 0.03392238405321617, - "acc_norm": 0.44907407407407407, - "acc_norm_stderr": 0.03392238405321617 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.44485294117647056, - "acc_stderr": 0.03018753206032938, - "acc_norm": 0.44485294117647056, - "acc_norm_stderr": 0.03018753206032938 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2612244897959184, - "acc_stderr": 0.028123429335142773, - "acc_norm": 0.2612244897959184, - "acc_norm_stderr": 0.028123429335142773 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24050632911392406, - "acc_stderr": 0.027820781981149685, - "acc_norm": 0.24050632911392406, - "acc_norm_stderr": 0.027820781981149685 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.227509778357236, - "acc_stderr": 0.010707188576864226, - "acc_norm": 0.227509778357236, - "acc_norm_stderr": 0.010707188576864226 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.030190282453501936, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.030190282453501936 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2668298653610771, - "mc1_stderr": 0.015483691939237269, - "mc2": 0.42019223039185516, - "mc2_stderr": 0.01650268606738961 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.44835680751173707, - "acc_stderr": 0.0170481098700285, - "acc_norm": 0.5516431924882629, - "acc_norm_stderr": 0.017048109870028496 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - 
"harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "leo911kim/Exodia-7B", - "model_sha": "b3e1f98b934da7498bb18ce0cb9e0fc857593656", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json deleted file mode 100644 index 7de523e0b89feadb7d440f7c12e21f8de5e4821b..0000000000000000000000000000000000000000 --- a/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2568259385665529, - "acc_stderr": 0.0127669237941168, - "acc_norm": 0.3122866894197952, - "acc_norm_stderr": 0.013542598541688065 - }, - "harness|ko_hellaswag|10": { - "acc": 0.344353714399522, - "acc_stderr": 0.004741859753178411, - "acc_norm": 0.4522007568213503, - "acc_norm_stderr": 0.004966928094797574 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3391812865497076, - "acc_stderr": 0.03631053496488905, - "acc_norm": 0.3391812865497076, - "acc_norm_stderr": 0.03631053496488905 - }, - "harness|ko_mmlu_management|5": { - "acc": 
0.21359223300970873, - "acc_stderr": 0.04058042015646034, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.04058042015646034 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3524904214559387, - "acc_stderr": 0.017084150244081376, - "acc_norm": 0.3524904214559387, - "acc_norm_stderr": 0.017084150244081376 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617723, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617723 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.029379170464124818, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.029379170464124818 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.29518072289156627, - "acc_stderr": 0.0355092018568963, - "acc_norm": 0.29518072289156627, - "acc_norm_stderr": 0.0355092018568963 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3633440514469453, - "acc_stderr": 0.02731684767419272, - "acc_norm": 0.3633440514469453, - "acc_norm_stderr": 0.02731684767419272 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.03210062154134987, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.03210062154134987 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.033948539651564025, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.033948539651564025 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.04093793981266237, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.04093793981266237 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.031357095996135904, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.031357095996135904 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.023901157979402544, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.023901157979402544 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536821, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536821 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03010833071801162, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03010833071801162 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34516129032258064, - "acc_stderr": 0.027045746573534323, - "acc_norm": 
0.34516129032258064, - "acc_norm_stderr": 0.027045746573534323 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4700854700854701, - "acc_stderr": 0.03269741106812444, - "acc_norm": 0.4700854700854701, - "acc_norm_stderr": 0.03269741106812444 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.04309118709946459, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.04309118709946459 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.02578787422095932, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.02578787422095932 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4228855721393035, - "acc_stderr": 0.03493231777421282, - "acc_norm": 0.4228855721393035, - "acc_norm_stderr": 0.03493231777421282 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.034140140070440354, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.034140140070440354 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918424, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918424 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.02607431485165708, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.02607431485165708 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3487654320987654, - "acc_stderr": 0.02651759772446501, - "acc_norm": 0.3487654320987654, - "acc_norm_stderr": 0.02651759772446501 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38341968911917096, - "acc_stderr": 0.03508984236295341, - "acc_norm": 0.38341968911917096, - "acc_norm_stderr": 0.03508984236295341 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3651376146788991, - "acc_stderr": 0.020642801454384005, - "acc_norm": 0.3651376146788991, - "acc_norm_stderr": 0.020642801454384005 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 
0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.40522875816993464, - "acc_stderr": 0.028110928492809075, - "acc_norm": 0.40522875816993464, - "acc_norm_stderr": 0.028110928492809075 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4380165289256198, - "acc_stderr": 0.04529146804435792, - "acc_norm": 0.4380165289256198, - "acc_norm_stderr": 0.04529146804435792 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.018342529845275908, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.018342529845275908 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101366, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101366 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.03350991604696042, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.03350991604696042 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2245810055865922, - "acc_stderr": 0.01395680366654464, - "acc_norm": 0.2245810055865922, - "acc_norm_stderr": 0.01395680366654464 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4375, - "acc_stderr": 0.030134614954403924, - "acc_norm": 0.4375, - "acc_norm_stderr": 0.030134614954403924 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3306122448979592, - "acc_stderr": 0.030116426296540613, - "acc_norm": 0.3306122448979592, - "acc_norm_stderr": 0.030116426296540613 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.03105239193758435, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.03105239193758435 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.288135593220339, - "acc_stderr": 0.011567140661324561, - "acc_norm": 0.288135593220339, - "acc_norm_stderr": 0.011567140661324561 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.03213325717373617, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.03213325717373617 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139404, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139404 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.01494881267906214, - "mc2": 0.3833926324458877, - "mc2_stderr": 0.015094351709331206 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3908450704225352, - "acc_stderr": 0.016726359269640344, - "acc_norm": 0.46830985915492956, - 
"acc_norm_stderr": 0.01710531885082842 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "leo911kim/Exodia-kor-7B-v2", - "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json b/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json deleted file mode 100644 index 859ec4e2032759fea31757458ff9e3b576ed9c63..0000000000000000000000000000000000000000 --- a/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2568259385665529, - "acc_stderr": 0.0127669237941168, - "acc_norm": 
0.3122866894197952, - "acc_norm_stderr": 0.013542598541688065 - }, - "harness|ko_hellaswag|10": { - "acc": 0.344353714399522, - "acc_stderr": 0.004741859753178411, - "acc_norm": 0.4522007568213503, - "acc_norm_stderr": 0.004966928094797574 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3391812865497076, - "acc_stderr": 0.03631053496488905, - "acc_norm": 0.3391812865497076, - "acc_norm_stderr": 0.03631053496488905 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.21359223300970873, - "acc_stderr": 0.04058042015646034, - "acc_norm": 0.21359223300970873, - "acc_norm_stderr": 0.04058042015646034 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3524904214559387, - "acc_stderr": 0.017084150244081376, - "acc_norm": 0.3524904214559387, - "acc_norm_stderr": 0.017084150244081376 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617723, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617723 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.029379170464124818, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.029379170464124818 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.29518072289156627, - "acc_stderr": 0.0355092018568963, - "acc_norm": 0.29518072289156627, - "acc_norm_stderr": 0.0355092018568963 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3633440514469453, - "acc_stderr": 0.02731684767419272, - "acc_norm": 0.3633440514469453, - "acc_norm_stderr": 0.02731684767419272 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.03210062154134987, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.03210062154134987 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3484848484848485, - "acc_stderr": 0.033948539651564025, - "acc_norm": 0.3484848484848485, - "acc_norm_stderr": 0.033948539651564025 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4068965517241379, - "acc_stderr": 0.04093793981266237, - "acc_norm": 0.4068965517241379, - "acc_norm_stderr": 0.04093793981266237 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3697478991596639, - "acc_stderr": 0.031357095996135904, - "acc_norm": 0.3697478991596639, - "acc_norm_stderr": 0.031357095996135904 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.023901157979402544, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.023901157979402544 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536821, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536821 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03010833071801162, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03010833071801162 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.34516129032258064, - "acc_stderr": 0.027045746573534323, - "acc_norm": 0.34516129032258064, - "acc_norm_stderr": 0.027045746573534323 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4700854700854701, - "acc_stderr": 0.03269741106812444, - "acc_norm": 0.4700854700854701, - "acc_norm_stderr": 0.03269741106812444 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.04309118709946459, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.04309118709946459 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.02578787422095932, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.02578787422095932 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4228855721393035, - "acc_stderr": 0.03493231777421282, - "acc_norm": 0.4228855721393035, - "acc_norm_stderr": 0.03493231777421282 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.034140140070440354, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.034140140070440354 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918424, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918424 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.02607431485165708, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.02607431485165708 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3487654320987654, - "acc_stderr": 0.02651759772446501, - "acc_norm": 0.3487654320987654, - "acc_norm_stderr": 0.02651759772446501 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38341968911917096, - "acc_stderr": 0.03508984236295341, - "acc_norm": 0.38341968911917096, - "acc_norm_stderr": 0.03508984236295341 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3651376146788991, - "acc_stderr": 0.020642801454384005, - "acc_norm": 0.3651376146788991, - "acc_norm_stderr": 0.020642801454384005 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.40522875816993464, - "acc_stderr": 0.028110928492809075, - "acc_norm": 0.40522875816993464, - "acc_norm_stderr": 0.028110928492809075 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4380165289256198, - "acc_stderr": 0.04529146804435792, - "acc_norm": 0.4380165289256198, - "acc_norm_stderr": 0.04529146804435792 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.018342529845275908, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.018342529845275908 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30851063829787234, - "acc_stderr": 0.027553366165101366, - "acc_norm": 0.30851063829787234, - "acc_norm_stderr": 0.027553366165101366 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.03350991604696042, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.03350991604696042 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2245810055865922, - "acc_stderr": 0.01395680366654464, - "acc_norm": 0.2245810055865922, - "acc_norm_stderr": 0.01395680366654464 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695236 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4375, - "acc_stderr": 0.030134614954403924, - "acc_norm": 0.4375, - "acc_norm_stderr": 0.030134614954403924 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3306122448979592, - "acc_stderr": 0.030116426296540613, - "acc_norm": 0.3306122448979592, - "acc_norm_stderr": 0.030116426296540613 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.03105239193758435, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.03105239193758435 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.288135593220339, - "acc_stderr": 0.011567140661324561, - "acc_norm": 0.288135593220339, - "acc_norm_stderr": 0.011567140661324561 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.03213325717373617, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.03213325717373617 - }, - 
"harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139404, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139404 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.01494881267906214, - "mc2": 0.3833926324458877, - "mc2_stderr": 0.015094351709331206 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3908450704225352, - "acc_stderr": 0.016726359269640344, - "acc_norm": 0.46830985915492956, - "acc_norm_stderr": 0.01710531885082842 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "leo911kim/Exodia-kor-7b-v2", - "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No 
newline at end of file diff --git a/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json b/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json deleted file mode 100644 index 2fc4312bf53f899cb5693fe54cb466539d3b4ed4..0000000000000000000000000000000000000000 --- a/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2636518771331058, - "acc_stderr": 0.012875929151297066, - "acc_norm": 0.3370307167235495, - "acc_norm_stderr": 0.01381347665290227 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35331607249551883, - "acc_stderr": 0.004770229206838891, - "acc_norm": 0.4847639912368054, - "acc_norm_stderr": 0.004987464257999312 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.34502923976608185, - "acc_stderr": 0.036459813773888065, - "acc_norm": 0.34502923976608185, - "acc_norm_stderr": 0.036459813773888065 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.20388349514563106, - "acc_stderr": 0.0398913985953177, - "acc_norm": 0.20388349514563106, - "acc_norm_stderr": 0.0398913985953177 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26053639846743293, - "acc_stderr": 0.015696008563807096, - "acc_norm": 0.26053639846743293, - "acc_norm_stderr": 0.015696008563807096 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.21481481481481482, - "acc_stderr": 0.035478541985608236, - "acc_norm": 0.21481481481481482, - "acc_norm_stderr": 0.035478541985608236 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102987, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102987 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2710843373493976, - "acc_stderr": 0.03460579907553027, - "acc_norm": 0.2710843373493976, - "acc_norm_stderr": 0.03460579907553027 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.21221864951768488, - "acc_stderr": 0.023222756797435122, - "acc_norm": 0.21221864951768488, - "acc_norm_stderr": 0.023222756797435122 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3542600896860987, - "acc_stderr": 0.03210062154134987, - "acc_norm": 0.3542600896860987, - "acc_norm_stderr": 0.03210062154134987 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621503, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621503 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.20707070707070707, - "acc_stderr": 0.028869778460267042, - "acc_norm": 0.20707070707070707, - "acc_norm_stderr": 0.028869778460267042 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2689075630252101, - "acc_stderr": 0.028801392193631273, - "acc_norm": 0.2689075630252101, - "acc_norm_stderr": 0.028801392193631273 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2205128205128205, - "acc_stderr": 0.02102067268082791, - "acc_norm": 0.2205128205128205, - "acc_norm_stderr": 0.02102067268082791 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.04524596007030049, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.04524596007030049 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.15270935960591134, - "acc_stderr": 0.025308904539380627, - "acc_norm": 0.15270935960591134, - "acc_norm_stderr": 0.025308904539380627 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2161290322580645, - "acc_stderr": 0.023415293433568525, - "acc_norm": 0.2161290322580645, - "acc_norm_stderr": 0.023415293433568525 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.37606837606837606, - "acc_stderr": 0.031733936329694824, - "acc_norm": 0.37606837606837606, - "acc_norm_stderr": 0.031733936329694824 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.025288394502891377, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.025288394502891377 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2074074074074074, - "acc_stderr": 0.024720713193952172, - "acc_norm": 0.2074074074074074, - "acc_norm_stderr": 0.024720713193952172 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.03374235550425694, - "acc_norm": 0.2185430463576159, - "acc_norm_stderr": 0.03374235550425694 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2885572139303483, - "acc_stderr": 0.0320384104021332, - "acc_norm": 0.2885572139303483, - "acc_norm_stderr": 0.0320384104021332 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.0309528902177499, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.0309528902177499 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.20899470899470898, - "acc_stderr": 0.020940481565334835, - "acc_norm": 0.20899470899470898, - "acc_norm_stderr": 0.020940481565334835 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.023948512905468348, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.023948512905468348 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 
0.25308641975308643, - "acc_stderr": 0.024191808600713, - "acc_norm": 0.25308641975308643, - "acc_norm_stderr": 0.024191808600713 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.21761658031088082, - "acc_stderr": 0.029778663037752943, - "acc_norm": 0.21761658031088082, - "acc_norm_stderr": 0.029778663037752943 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23486238532110093, - "acc_stderr": 0.018175110510343585, - "acc_norm": 0.23486238532110093, - "acc_norm_stderr": 0.018175110510343585 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2875816993464052, - "acc_stderr": 0.02591780611714716, - "acc_norm": 0.2875816993464052, - "acc_norm_stderr": 0.02591780611714716 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.30578512396694213, - "acc_stderr": 0.04205953933884122, - "acc_norm": 0.30578512396694213, - "acc_norm_stderr": 0.04205953933884122 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.017952449196987866, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.017952449196987866 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23049645390070922, - "acc_stderr": 0.025123739226872405, - "acc_norm": 0.23049645390070922, - "acc_norm_stderr": 0.025123739226872405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.17592592592592593, - "acc_stderr": 0.025967420958258526, - "acc_norm": 0.17592592592592593, - "acc_norm_stderr": 0.025967420958258526 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.024562204314142314, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.024562204314142314 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24489795918367346, - "acc_stderr": 0.027529637440174913, - "acc_norm": 0.24489795918367346, - "acc_norm_stderr": 0.027529637440174913 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.31645569620253167, 
- "acc_stderr": 0.030274974880218974, - "acc_norm": 0.31645569620253167, - "acc_norm_stderr": 0.030274974880218974 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2588005215123859, - "acc_stderr": 0.01118610904656461, - "acc_norm": 0.2588005215123859, - "acc_norm_stderr": 0.01118610904656461 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.031493281045079556, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.031493281045079556 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23030303030303031, - "acc_stderr": 0.032876667586034886, - "acc_norm": 0.23030303030303031, - "acc_norm_stderr": 0.032876667586034886 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24724602203182375, - "mc1_stderr": 0.01510240479735965, - "mc2": 0.39305582191498534, - "mc2_stderr": 0.015037592654153921 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3028169014084507, - "acc_stderr": 0.01575065796584493, - "acc_norm": 0.37910798122065725, - "acc_norm_stderr": 0.016631243508204944 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "leo911kim/Exodia-kor-7b", - "model_sha": "dfb83ef9894aadda3301f98602d4d45cfd19c192", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json b/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json deleted file mode 100644 index cfdccee1a0b4adb0dfd61c218de153c240374d3d..0000000000000000000000000000000000000000 --- a/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20051194539249148, - "acc_stderr": 0.01170031805049937, - "acc_norm": 0.2363481228668942, - "acc_norm_stderr": 0.012414960524301811 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2500497908783111, - "acc_stderr": 0.004321564303822431, - "acc_norm": 0.246265684126668, - "acc_norm_stderr": 0.004299546103761434 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3216374269005848, - "acc_stderr": 0.03582529442573122, - "acc_norm": 0.3216374269005848, - "acc_norm_stderr": 0.03582529442573122 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23754789272030652, - "acc_stderr": 0.015218733046150191, - "acc_norm": 0.23754789272030652, - "acc_norm_stderr": 0.015218733046150191 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.18518518518518517, - "acc_stderr": 0.03355677216313142, - "acc_norm": 0.18518518518518517, - "acc_norm_stderr": 0.03355677216313142 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.26382978723404255, - "acc_stderr": 0.028809989854102987, - "acc_norm": 0.26382978723404255, - "acc_norm_stderr": 0.028809989854102987 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370518, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370518 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.1864951768488746, - "acc_stderr": 0.022122439772480757, - "acc_norm": 0.1864951768488746, - "acc_norm_stderr": 0.022122439772480757 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.031146796482972465, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.031146796482972465 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.17676767676767677, - "acc_stderr": 0.027178752639044915, - "acc_norm": 0.17676767676767677, - "acc_norm_stderr": 0.027178752639044915 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, 
- "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.20256410256410257, - "acc_stderr": 0.020377660970371393, - "acc_norm": 0.20256410256410257, - "acc_norm_stderr": 0.020377660970371393 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.04236511258094633, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.04236511258094633 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.15270935960591134, - "acc_stderr": 0.025308904539380627, - "acc_norm": 0.15270935960591134, - "acc_norm_stderr": 0.025308904539380627 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.1774193548387097, - "acc_stderr": 0.021732540689329276, - "acc_norm": 0.1774193548387097, - "acc_norm_stderr": 0.021732540689329276 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2905982905982906, - "acc_stderr": 0.029745048572674057, - "acc_norm": 0.2905982905982906, - "acc_norm_stderr": 0.029745048572674057 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.025288394502891377, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.025288394502891377 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2111111111111111, - "acc_stderr": 0.024882116857655075, - "acc_norm": 0.2111111111111111, - "acc_norm_stderr": 0.024882116857655075 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.1986754966887417, - "acc_stderr": 0.03257847384436775, - "acc_norm": 0.1986754966887417, - "acc_norm_stderr": 0.03257847384436775 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.030360490154014645, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.030360490154014645 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.030952890217749884, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.030952890217749884 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.20899470899470898, - "acc_stderr": 0.020940481565334835, - "acc_norm": 0.20899470899470898, - "acc_norm_stderr": 0.020940481565334835 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - 
"acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.22085889570552147, - "acc_stderr": 0.03259177392742178, - "acc_norm": 0.22085889570552147, - "acc_norm_stderr": 0.03259177392742178 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.21604938271604937, - "acc_stderr": 0.022899162918445796, - "acc_norm": 0.21604938271604937, - "acc_norm_stderr": 0.022899162918445796 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.19689119170984457, - "acc_stderr": 0.028697873971860677, - "acc_norm": 0.19689119170984457, - "acc_norm_stderr": 0.028697873971860677 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.1926605504587156, - "acc_stderr": 0.016909276884936097, - "acc_norm": 0.1926605504587156, - "acc_norm_stderr": 0.016909276884936097 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.02392915551735129, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.02392915551735129 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.038968789850704164, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.038968789850704164 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25, - "acc_stderr": 0.01751781884501444, - "acc_norm": 0.25, - "acc_norm_stderr": 0.01751781884501444 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432414, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.1527777777777778, - "acc_stderr": 0.024536326026134238, - "acc_norm": 0.1527777777777778, - "acc_norm_stderr": 0.024536326026134238 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - 
"harness|ko_mmlu_professional_medicine|5": { - "acc": 0.18382352941176472, - "acc_stderr": 0.02352924218519311, - "acc_norm": 0.18382352941176472, - "acc_norm_stderr": 0.02352924218519311 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.18775510204081633, - "acc_stderr": 0.025000256039546198, - "acc_norm": 0.18775510204081633, - "acc_norm_stderr": 0.025000256039546198 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2457627118644068, - "acc_stderr": 0.010996156635142692, - "acc_norm": 0.2457627118644068, - "acc_norm_stderr": 0.010996156635142692 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2839657282741738, - "mc1_stderr": 0.015785370858396736, - "mc2": 0.4901664286815018, - "mc2_stderr": 0.016461517029586932 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.029342723004694836, - "acc_stderr": 0.005785200090218143, - "acc_norm": 0.21830985915492956, - "acc_norm_stderr": 0.014160846392954365 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - 
"harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "lifelongeek/ko-7b-ins", - "model_sha": "4970a8c78104fed617103be2763fb54e8e90ca72", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json b/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json deleted file mode 100644 index 3bb2ddf727eb4a31ffc047010451ae28e6fe2cf9..0000000000000000000000000000000000000000 --- a/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3370307167235495, - "acc_stderr": 0.013813476652902279, - "acc_norm": 0.39419795221843, - "acc_norm_stderr": 0.014280522667467325 - }, - "harness|ko_hellaswag|10": { - "acc": 0.364070902210715, - "acc_stderr": 0.004801852881329742, - "acc_norm": 0.462158932483569, - "acc_norm_stderr": 0.004975470690867166 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602967, - "acc_norm": 0.45614035087719296, - "acc_norm_stderr": 0.03820042586602967 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.49514563106796117, - "acc_stderr": 0.049505043821289195, - "acc_norm": 0.49514563106796117, - "acc_norm_stderr": 0.049505043821289195 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4725415070242657, - "acc_stderr": 0.017852981266633955, - "acc_norm": 0.4725415070242657, - "acc_norm_stderr": 0.017852981266633955 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3446808510638298, - "acc_stderr": 0.03106898596312215, - "acc_norm": 0.3446808510638298, - "acc_norm_stderr": 0.03106898596312215 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.03789134424611548, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.03789134424611548 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4340836012861736, - "acc_stderr": 0.0281502322445356, - "acc_norm": 0.4340836012861736, - "acc_norm_stderr": 0.0281502322445356 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4260089686098655, - "acc_stderr": 0.0331883328621728, - "acc_norm": 0.4260089686098655, - "acc_norm_stderr": 0.0331883328621728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553893, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553893 - }, - 
"harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5606060606060606, - "acc_stderr": 0.035360859475294805, - "acc_norm": 0.5606060606060606, - "acc_norm_stderr": 0.035360859475294805 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.041042692118062316, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.041042692118062316 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.04533838195929775, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.04533838195929775 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.39915966386554624, - "acc_stderr": 0.03181110032413925, - "acc_norm": 0.39915966386554624, - "acc_norm_stderr": 0.03181110032413925 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3974358974358974, - "acc_stderr": 0.024811920017903836, - "acc_norm": 0.3974358974358974, - "acc_norm_stderr": 0.024811920017903836 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.55, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.55, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.03465304488406796, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.03465304488406796 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44193548387096776, - "acc_stderr": 0.028251557906849727, - "acc_norm": 0.44193548387096776, - "acc_norm_stderr": 0.028251557906849727 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6324786324786325, - "acc_stderr": 0.031585391577456365, - "acc_norm": 0.6324786324786325, - "acc_norm_stderr": 0.031585391577456365 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4075471698113208, - "acc_stderr": 0.030242233800854498, - "acc_norm": 0.4075471698113208, - "acc_norm_stderr": 0.030242233800854498 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.44545454545454544, - "acc_stderr": 0.047605488214603246, - "acc_norm": 0.44545454545454544, - "acc_norm_stderr": 0.047605488214603246 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.028133252578815642, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.028133252578815642 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199946, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199946 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6268656716417911, - "acc_stderr": 0.034198326081760065, - "acc_norm": 0.6268656716417911, - "acc_norm_stderr": 0.034198326081760065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.036928207672648664, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.036928207672648664 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.024278568024307702, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 
0.024278568024307702 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.55, - "acc_stderr": 0.05, - "acc_norm": 0.55, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4913294797687861, - "acc_stderr": 0.02691504735536981, - "acc_norm": 0.4913294797687861, - "acc_norm_stderr": 0.02691504735536981 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.43558282208588955, - "acc_stderr": 0.03895632464138937, - "acc_norm": 0.43558282208588955, - "acc_norm_stderr": 0.03895632464138937 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4228395061728395, - "acc_stderr": 0.027487472980871598, - "acc_norm": 0.4228395061728395, - "acc_norm_stderr": 0.027487472980871598 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.03608003225569654, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.03608003225569654 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.43853211009174314, - "acc_stderr": 0.021274713073954562, - "acc_norm": 0.43853211009174314, - "acc_norm_stderr": 0.021274713073954562 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.04073524322147126, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.04073524322147126 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.02843109544417664, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.02843109544417664 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4868421052631579, - "acc_stderr": 0.04067533136309174, - "acc_norm": 0.4868421052631579, - "acc_norm_stderr": 0.04067533136309174 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3545751633986928, - "acc_stderr": 0.019353360547553697, - "acc_norm": 0.3545751633986928, - "acc_norm_stderr": 0.019353360547553697 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3475177304964539, - "acc_stderr": 0.02840662780959095, - "acc_norm": 0.3475177304964539, - "acc_norm_stderr": 0.02840662780959095 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.041577515398656284, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.041577515398656284 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.03167468706828979, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.03167468706828979 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 
0.23128491620111732, - "acc_stderr": 0.014102223623152567, - "acc_norm": 0.23128491620111732, - "acc_norm_stderr": 0.014102223623152567 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3014705882352941, - "acc_stderr": 0.027875982114273168, - "acc_norm": 0.3014705882352941, - "acc_norm_stderr": 0.027875982114273168 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4857142857142857, - "acc_stderr": 0.03199615232806287, - "acc_norm": 0.4857142857142857, - "acc_norm_stderr": 0.03199615232806287 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5611814345991561, - "acc_stderr": 0.032302649315470375, - "acc_norm": 0.5611814345991561, - "acc_norm_stderr": 0.032302649315470375 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3428943937418514, - "acc_stderr": 0.012123463271585899, - "acc_norm": 0.3428943937418514, - "acc_norm_stderr": 0.012123463271585899 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.03471157907953426, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.03471157907953426 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.038956580652718446, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.038956580652718446 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2607099143206854, - "mc1_stderr": 0.015368841620766372, - "mc2": 0.4146930075606435, - "mc2_stderr": 0.015301613292343582 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.39906103286384975, - "acc_stderr": 0.016786883026085487, - "acc_norm": 0.4624413145539906, - "acc_norm_stderr": 0.017091354221581635 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maum-ai/llamaum-13b-chat-qlora-s", - "model_sha": "209891592ed47343e7654b1b7fdc1a514089df3b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json b/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json deleted file mode 100644 index 31079842af0739d2aaaeb24ea347b1f41ef7a538..0000000000000000000000000000000000000000 --- a/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.28668941979522183, - "acc_stderr": 0.013214986329274762, - "acc_norm": 0.35665529010238906, - "acc_norm_stderr": 0.013998056902620196 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35570603465445133, - "acc_stderr": 0.00477748315963403, - "acc_norm": 0.4393547102170882, - "acc_norm_stderr": 0.004952942072999276 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38596491228070173, - "acc_stderr": 0.03733756969066165, - "acc_norm": 0.38596491228070173, - "acc_norm_stderr": 0.03733756969066165 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.24271844660194175, - "acc_stderr": 0.042450224863844956, - "acc_norm": 0.24271844660194175, - "acc_norm_stderr": 0.042450224863844956 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3652618135376756, - "acc_stderr": 0.01721853002883864, - "acc_norm": 0.3652618135376756, - "acc_norm_stderr": 0.01721853002883864 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2851063829787234, - "acc_stderr": 0.029513196625539345, - "acc_norm": 0.2851063829787234, - "acc_norm_stderr": 0.029513196625539345 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.23493975903614459, - "acc_stderr": 0.03300533186128922, - "acc_norm": 0.23493975903614459, - "acc_norm_stderr": 0.03300533186128922 - }, - 
"harness|ko_mmlu_philosophy|5": { - "acc": 0.33440514469453375, - "acc_stderr": 0.026795422327893947, - "acc_norm": 0.33440514469453375, - "acc_norm_stderr": 0.026795422327893947 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.32286995515695066, - "acc_stderr": 0.031381476375754995, - "acc_norm": 0.32286995515695066, - "acc_norm_stderr": 0.031381476375754995 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.03768335959728743, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.03768335959728743 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2828282828282828, - "acc_stderr": 0.032087795587867514, - "acc_norm": 0.2828282828282828, - "acc_norm_stderr": 0.032087795587867514 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.03831226048850333, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.03831226048850333 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416544, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416544 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.24369747899159663, - "acc_stderr": 0.027886828078380565, - "acc_norm": 0.24369747899159663, - "acc_norm_stderr": 0.027886828078380565 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.25384615384615383, - "acc_stderr": 0.022066054378726257, - "acc_norm": 0.25384615384615383, - "acc_norm_stderr": 0.022066054378726257 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04668408033024931, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04668408033024931 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03255086769970103, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03255086769970103 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2709677419354839, - "acc_stderr": 0.02528441611490016, - "acc_norm": 0.2709677419354839, - "acc_norm_stderr": 0.02528441611490016 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.032485775115783995, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.032485775115783995 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24150943396226415, - "acc_stderr": 0.026341480371118345, - "acc_norm": 0.24150943396226415, - "acc_norm_stderr": 0.026341480371118345 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.33636363636363636, - "acc_stderr": 0.04525393596302506, - "acc_norm": 0.33636363636363636, - "acc_norm_stderr": 0.04525393596302506 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804725, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.03511807571804725 - }, 
- "harness|ko_mmlu_sociology|5": { - "acc": 0.373134328358209, - "acc_stderr": 0.034198326081760065, - "acc_norm": 0.373134328358209, - "acc_norm_stderr": 0.034198326081760065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.03435568056047875, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.03435568056047875 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.291005291005291, - "acc_stderr": 0.023393826500484875, - "acc_norm": 0.291005291005291, - "acc_norm_stderr": 0.023393826500484875 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.03680350371286462, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.03680350371286462 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.02610567386140981, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.02610567386140981 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.03324837939758159, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.03324837939758159 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26788990825688075, - "acc_stderr": 0.018987462257978652, - "acc_norm": 0.26788990825688075, - "acc_norm_stderr": 0.018987462257978652 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.034550710191021496, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.034550710191021496 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.026256053835718968, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.026256053835718968 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2875816993464052, - "acc_stderr": 0.018311653053648222, - "acc_norm": 0.2875816993464052, - "acc_norm_stderr": 0.018311653053648222 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 
0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.027920963147993666, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.027920963147993666 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.19852941176470587, - "acc_stderr": 0.024231013370541097, - "acc_norm": 0.19852941176470587, - "acc_norm_stderr": 0.024231013370541097 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.02737294220178816, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.02737294220178816 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.03129920825530213, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.03129920825530213 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27249022164276404, - "acc_stderr": 0.01137165829431153, - "acc_norm": 0.27249022164276404, - "acc_norm_stderr": 0.01137165829431153 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03308611113236436, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03308611113236436 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2839657282741738, - "mc1_stderr": 0.015785370858396708, - "mc2": 0.4469469691662156, - "mc2_stderr": 0.015668694918169947 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3544600938967136, - "acc_stderr": 0.016397605788502082, - "acc_norm": 0.5152582159624414, - "acc_norm_stderr": 0.01713179666374492 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - 
"harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maum-ai/llamaum-13b-instruct-s", - "model_sha": "d9a9f9c019908c2d302da856473891095ad81940", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json b/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json deleted file mode 100644 index de539ef6385bccf8786b67ece619794d19db4742..0000000000000000000000000000000000000000 --- a/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3967576791808874, - "acc_stderr": 0.014296513020180639, - "acc_norm": 0.45819112627986347, - "acc_norm_stderr": 0.014560220308714702 - }, - "harness|ko_hellaswag|10": { - "acc": 0.41047600079665403, - "acc_stderr": 0.004909148239488287, - "acc_norm": 0.5376419040031866, - "acc_norm_stderr": 0.004975621147406092 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5380116959064327, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.5380116959064327, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458933, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458933 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5006385696040868, - "acc_stderr": 0.01787994891443169, - "acc_norm": 0.5006385696040868, - "acc_norm_stderr": 0.01787994891443169 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.45185185185185184, - "acc_stderr": 0.04299268905480864, - "acc_norm": 0.45185185185185184, - "acc_norm_stderr": 0.04299268905480864 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, 
- "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.35319148936170214, - "acc_stderr": 0.03124532520276193, - "acc_norm": 0.35319148936170214, - "acc_norm_stderr": 0.03124532520276193 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.37349397590361444, - "acc_stderr": 0.037658451171688624, - "acc_norm": 0.37349397590361444, - "acc_norm_stderr": 0.037658451171688624 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4662379421221865, - "acc_stderr": 0.028333277109562786, - "acc_norm": 0.4662379421221865, - "acc_norm_stderr": 0.028333277109562786 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.43946188340807174, - "acc_stderr": 0.03331092511038179, - "acc_norm": 0.43946188340807174, - "acc_norm_stderr": 0.03331092511038179 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.03560716516531061, - "acc_norm": 0.48484848484848486, - "acc_norm_stderr": 0.03560716516531061 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.35172413793103446, - "acc_stderr": 0.0397923663749741, - "acc_norm": 0.35172413793103446, - "acc_norm_stderr": 0.0397923663749741 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.41596638655462187, - "acc_stderr": 0.03201650100739615, - "acc_norm": 0.41596638655462187, - "acc_norm_stderr": 0.03201650100739615 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4153846153846154, - "acc_stderr": 0.024985354923102315, - "acc_norm": 0.4153846153846154, - "acc_norm_stderr": 0.024985354923102315 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.04812917324536823, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.04812917324536823 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3694581280788177, - "acc_stderr": 0.03395970381998575, - "acc_norm": 0.3694581280788177, - "acc_norm_stderr": 0.03395970381998575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4032258064516129, - "acc_stderr": 0.02790615082604114, - "acc_norm": 0.4032258064516129, - "acc_norm_stderr": 0.02790615082604114 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6623931623931624, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.6623931623931624, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.39245283018867927, - "acc_stderr": 0.030052580579557845, - "acc_norm": 0.39245283018867927, - "acc_norm_stderr": 0.030052580579557845 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - 
"acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2851851851851852, - "acc_stderr": 0.027528599210340492, - "acc_norm": 0.2851851851851852, - "acc_norm_stderr": 0.027528599210340492 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.036313298039696545, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.036313298039696545 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5572139303482587, - "acc_stderr": 0.03512310964123935, - "acc_norm": 0.5572139303482587, - "acc_norm_stderr": 0.03512310964123935 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.03714325906302065, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302065 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.023809523809523867, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.023809523809523867 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03942082639927214, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03942082639927214 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.58, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.58, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4624277456647399, - "acc_stderr": 0.026842985519615375, - "acc_norm": 0.4624277456647399, - "acc_norm_stderr": 0.026842985519615375 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.027744313443376536, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.027744313443376536 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.47668393782383417, - "acc_stderr": 0.03604513672442206, - "acc_norm": 0.47668393782383417, - "acc_norm_stderr": 0.03604513672442206 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4990825688073395, - "acc_stderr": 0.021437287056051215, - "acc_norm": 0.4990825688073395, - "acc_norm_stderr": 0.021437287056051215 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.038095238095238106, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.038095238095238106 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.027780141207023344, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.027780141207023344 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - 
"acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.46710526315789475, - "acc_stderr": 0.04060127035236397, - "acc_norm": 0.46710526315789475, - "acc_norm_stderr": 0.04060127035236397 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.019506291693954854, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.019506291693954854 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3120567375886525, - "acc_stderr": 0.02764012054516993, - "acc_norm": 0.3120567375886525, - "acc_norm_stderr": 0.02764012054516993 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.031415546294025425, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.031415546294025425 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.029029422815681407, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.029029422815681407 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.363265306122449, - "acc_stderr": 0.030789051139030806, - "acc_norm": 0.363265306122449, - "acc_norm_stderr": 0.030789051139030806 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.569620253164557, - "acc_stderr": 0.03223017195937598, - "acc_norm": 0.569620253164557, - "acc_norm_stderr": 0.03223017195937598 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3350717079530639, - "acc_stderr": 0.012055499471330373, - "acc_norm": 0.3350717079530639, - "acc_norm_stderr": 0.012055499471330373 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.43137254901960786, - "acc_stderr": 0.03476099060501637, - "acc_norm": 0.43137254901960786, - "acc_norm_stderr": 0.03476099060501637 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.03898531605579418, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.03898531605579418 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2827417380660955, - "mc1_stderr": 0.015764770836777308, - "mc2": 0.4426389060165117, - "mc2_stderr": 0.015221328776941925 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.32511737089201875, - "acc_stderr": 0.016057185777207574, - "acc_norm": 0.5093896713615024, - "acc_norm_stderr": 0.017136756699169635 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - 
"harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maum-ai/llamaum-13b-instruct-v1", - "model_sha": "10d1ae8e0155ba956a1e4cb16dd3b35415dea098", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-11B-Instruct-v0.3-pre/result_2023-10-19 00:08:33.json b/maywell/Synatra-11B-Instruct-v0.3-pre/result_2023-10-19 00:08:33.json deleted file mode 100644 index ce92998a6a1c9939841cf697e2c92bbf1c49cd53..0000000000000000000000000000000000000000 --- a/maywell/Synatra-11B-Instruct-v0.3-pre/result_2023-10-19 00:08:33.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3412969283276451, - "acc_stderr": 0.01385583128749772, - "acc_norm": 0.40017064846416384, - "acc_norm_stderr": 0.014317197787809186 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36606253734315874, - "acc_stderr": 0.00480742334322458, - "acc_norm": 0.47719577773351923, - "acc_norm_stderr": 0.004984589012289372 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4853801169590643, - "acc_stderr": 0.038331852752130205, - "acc_norm": 0.4853801169590643, - "acc_norm_stderr": 0.038331852752130205 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - 
"acc_stderr": 0.04931801994220416, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.04931801994220416 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49680715197956576, - "acc_stderr": 0.01787959894593307, - "acc_norm": 0.49680715197956576, - "acc_norm_stderr": 0.01787959894593307 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145633, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145633 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.031410821975962386, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.031410821975962386 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5080385852090032, - "acc_stderr": 0.028394421370984538, - "acc_norm": 0.5080385852090032, - "acc_norm_stderr": 0.028394421370984538 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3991031390134529, - "acc_stderr": 0.03286745312567961, - "acc_norm": 0.3991031390134529, - "acc_norm_stderr": 0.03286745312567961 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4732824427480916, - "acc_stderr": 0.04379024936553893, - "acc_norm": 0.4732824427480916, - "acc_norm_stderr": 0.04379024936553893 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.45, - "acc_stderr": 0.04999999999999999, - "acc_norm": 0.45, - "acc_norm_stderr": 0.04999999999999999 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5606060606060606, - "acc_stderr": 0.035360859475294805, - "acc_norm": 0.5606060606060606, - "acc_norm_stderr": 0.035360859475294805 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.04144311810878151, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.04144311810878151 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.04533838195929774, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.04533838195929774 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4495798319327731, - "acc_stderr": 0.03231293497137707, - "acc_norm": 0.4495798319327731, - "acc_norm_stderr": 0.03231293497137707 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.45897435897435895, - "acc_stderr": 0.025265525491284295, - "acc_norm": 0.45897435897435895, - "acc_norm_stderr": 0.025265525491284295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.48, - "acc_stderr": 0.05021167315686781, - "acc_norm": 0.48, - "acc_norm_stderr": 0.05021167315686781 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4039408866995074, - "acc_stderr": 0.0345245390382204, - "acc_norm": 0.4039408866995074, - "acc_norm_stderr": 0.0345245390382204 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45161290322580644, - "acc_stderr": 0.028310500348568392, - "acc_norm": 0.45161290322580644, - 
"acc_norm_stderr": 0.028310500348568392 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6324786324786325, - "acc_stderr": 0.03158539157745636, - "acc_norm": 0.6324786324786325, - "acc_norm_stderr": 0.03158539157745636 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4716981132075472, - "acc_stderr": 0.030723535249006107, - "acc_norm": 0.4716981132075472, - "acc_norm_stderr": 0.030723535249006107 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425464, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425464 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.02803792996911499, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.02803792996911499 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.582089552238806, - "acc_stderr": 0.034875586404620636, - "acc_norm": 0.582089552238806, - "acc_norm_stderr": 0.034875586404620636 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4508670520231214, - "acc_stderr": 0.03794012674697031, - "acc_norm": 0.4508670520231214, - "acc_norm_stderr": 0.03794012674697031 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3439153439153439, - "acc_stderr": 0.02446442662559643, - "acc_norm": 0.3439153439153439, - "acc_norm_stderr": 0.02446442662559643 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49710982658959535, - "acc_stderr": 0.02691864538323901, - "acc_norm": 0.49710982658959535, - "acc_norm_stderr": 0.02691864538323901 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.038566721635489125, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.038566721635489125 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4537037037037037, - "acc_stderr": 0.027701228468542602, - "acc_norm": 0.4537037037037037, - "acc_norm_stderr": 0.027701228468542602 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.47150259067357514, - "acc_stderr": 0.036025735712884414, - "acc_norm": 0.47150259067357514, - "acc_norm_stderr": 0.036025735712884414 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4990825688073395, - "acc_stderr": 0.021437287056051215, - "acc_norm": 0.4990825688073395, - "acc_norm_stderr": 0.021437287056051215 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.04104947269903394, - "acc_norm": 0.30158730158730157, - 
"acc_norm_stderr": 0.04104947269903394 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4673202614379085, - "acc_stderr": 0.02856869975222588, - "acc_norm": 0.4673202614379085, - "acc_norm_stderr": 0.02856869975222588 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237103, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237103 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40789473684210525, - "acc_stderr": 0.03999309712777472, - "acc_norm": 0.40789473684210525, - "acc_norm_stderr": 0.03999309712777472 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3839869281045752, - "acc_stderr": 0.019675808135281525, - "acc_norm": 0.3839869281045752, - "acc_norm_stderr": 0.019675808135281525 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.375, - "acc_stderr": 0.033016908987210894, - "acc_norm": 0.375, - "acc_norm_stderr": 0.033016908987210894 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23016759776536314, - "acc_stderr": 0.014078339253425807, - "acc_norm": 0.23016759776536314, - "acc_norm_stderr": 0.014078339253425807 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.029520095697687765, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.029520095697687765 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5949367088607594, - "acc_stderr": 0.031955147413706725, - "acc_norm": 0.5949367088607594, - "acc_norm_stderr": 0.031955147413706725 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31486310299869624, - "acc_stderr": 0.011862561755715937, - "acc_norm": 0.31486310299869624, - "acc_norm_stderr": 0.011862561755715937 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.49019607843137253, - "acc_stderr": 0.03508637358630572, - "acc_norm": 0.49019607843137253, - "acc_norm_stderr": 0.03508637358630572 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.49696969696969695, - "acc_stderr": 0.03904272341431856, - "acc_norm": 0.49696969696969695, - "acc_norm_stderr": 0.03904272341431856 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2778457772337821, - "mc1_stderr": 0.015680929364024633, - "mc2": 0.45111653933523216, - "mc2_stderr": 0.015355758550705367 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31103286384976525, - "acc_stderr": 0.015868563452870778, - "acc_norm": 0.42488262910798125, - "acc_norm_stderr": 
0.016945248826821704 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-11B-Instruct-v0.3-pre", - "model_sha": "a40fe5c95687a32967ea4573e958356a214ae652", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json b/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json deleted file mode 100644 index 6a8ad76fa24bdd968f5f31b1798e950201d83cff..0000000000000000000000000000000000000000 --- a/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.1766211604095563, - "acc_stderr": 0.011144042769316503, - "acc_norm": 
0.24146757679180889, - "acc_norm_stderr": 0.012506564839739432 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2528380800637323, - "acc_stderr": 0.004337506344899919, - "acc_norm": 0.24965146385182235, - "acc_norm_stderr": 0.004319267432460665 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.0312678171466318, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.0312678171466318 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.28735632183908044, - "acc_stderr": 0.0161824107306827, - "acc_norm": 0.28735632183908044, - "acc_norm_stderr": 0.0161824107306827 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2518518518518518, - "acc_stderr": 0.03749850709174021, - "acc_norm": 0.2518518518518518, - "acc_norm_stderr": 0.03749850709174021 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.03057944277361034, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.03057944277361034 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.03629335329947859, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.03629335329947859 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2733118971061093, - "acc_stderr": 0.025311765975426115, - "acc_norm": 0.2733118971061093, - "acc_norm_stderr": 0.025311765975426115 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.37668161434977576, - "acc_stderr": 0.03252113489929189, - "acc_norm": 0.37668161434977576, - "acc_norm_stderr": 0.03252113489929189 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.21717171717171718, - "acc_stderr": 0.02937661648494563, - "acc_norm": 0.21717171717171718, - "acc_norm_stderr": 0.02937661648494563 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2206896551724138, - "acc_stderr": 0.03455930201924811, - "acc_norm": 0.2206896551724138, - "acc_norm_stderr": 0.03455930201924811 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23109243697478993, - "acc_stderr": 0.027381406927868973, - "acc_norm": 0.23109243697478993, - "acc_norm_stderr": 0.027381406927868973 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2205128205128205, - "acc_stderr": 0.021020672680827912, - "acc_norm": 0.2205128205128205, - "acc_norm_stderr": 0.021020672680827912 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - 
"harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.044143436668549335, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.044143436668549335 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 0.031270907132976984, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.031270907132976984 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25483870967741934, - "acc_stderr": 0.024790118459332208, - "acc_norm": 0.25483870967741934, - "acc_norm_stderr": 0.024790118459332208 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.02860595370200424, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.02860595370200424 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2679245283018868, - "acc_stderr": 0.027257260322494845, - "acc_norm": 0.2679245283018868, - "acc_norm_stderr": 0.027257260322494845 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.34545454545454546, - "acc_stderr": 0.04554619617541054, - "acc_norm": 0.34545454545454546, - "acc_norm_stderr": 0.04554619617541054 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.1986754966887417, - "acc_stderr": 0.03257847384436776, - "acc_norm": 0.1986754966887417, - "acc_norm_stderr": 0.03257847384436776 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.030360490154014645, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.030360490154014645 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.030952890217749884, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.030952890217749884 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03476590104304134, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03476590104304134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943353, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943353 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.02456922360046085, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.02456922360046085 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.20725388601036268, - "acc_stderr": 0.029252823291803627, - "acc_norm": 0.20725388601036268, - "acc_norm_stderr": 0.029252823291803627 - }, - 
"harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512321984, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512321984 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23669724770642203, - "acc_stderr": 0.018224078117299095, - "acc_norm": 0.23669724770642203, - "acc_norm_stderr": 0.018224078117299095 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276862, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276862 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22549019607843138, - "acc_stderr": 0.02392915551735129, - "acc_norm": 0.22549019607843138, - "acc_norm_stderr": 0.02392915551735129 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.039418975265163025, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.039418975265163025 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.18421052631578946, - "acc_stderr": 0.031546980450822305, - "acc_norm": 0.18421052631578946, - "acc_norm_stderr": 0.031546980450822305 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2565359477124183, - "acc_stderr": 0.017667841612378984, - "acc_norm": 0.2565359477124183, - "acc_norm_stderr": 0.017667841612378984 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432414, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.1527777777777778, - "acc_stderr": 0.024536326026134238, - "acc_norm": 0.1527777777777778, - "acc_norm_stderr": 0.024536326026134238 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.15, - "acc_stderr": 0.0358870281282637, - "acc_norm": 0.15, - "acc_norm_stderr": 0.0358870281282637 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20220588235294118, - "acc_stderr": 0.02439819298665492, - "acc_norm": 0.20220588235294118, - "acc_norm_stderr": 0.02439819298665492 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.18775510204081633, - "acc_stderr": 0.025000256039546198, - "acc_norm": 0.18775510204081633, - "acc_norm_stderr": 0.025000256039546198 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - "acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2457627118644068, - "acc_stderr": 0.010996156635142692, - "acc_norm": 0.2457627118644068, - "acc_norm_stderr": 0.010996156635142692 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.029771775228145638, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.029771775228145638 - }, - 
"harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2423500611995104, - "mc1_stderr": 0.015000674373570345, - "mc2": 0.4752303618111022, - "mc2_stderr": 0.01719345285029173 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.08685446009389672, - "acc_stderr": 0.009653866382991476, - "acc_norm": 0.36971830985915494, - "acc_norm_stderr": 0.016547715475045563 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-11B-Tb2M_SM", - "model_sha": "7f2867881e6ebd2f1383a3d0be8b5573dd4897ad", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No 
newline at end of file diff --git a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:13:06.json b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:13:06.json deleted file mode 100644 index 3dc459cf931bf7cb75bd2893f542472fd7e9a147..0000000000000000000000000000000000000000 --- a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:13:06.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3455631399317406, - "acc_stderr": 0.013896938461145685, - "acc_norm": 0.4197952218430034, - "acc_norm_stderr": 0.01442218122630302 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37731527584146585, - "acc_stderr": 0.00483724201519111, - "acc_norm": 0.48775144393547104, - "acc_norm_stderr": 0.0049882839816310495 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5321637426900585, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.5321637426900585, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4854368932038835, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.4854368932038835, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49680715197956576, - "acc_stderr": 0.01787959894593307, - "acc_norm": 0.49680715197956576, - "acc_norm_stderr": 0.01787959894593307 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.030976692998534443, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.030976692998534443 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39156626506024095, - "acc_stderr": 0.037998574544796354, - "acc_norm": 0.39156626506024095, - "acc_norm_stderr": 0.037998574544796354 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4630225080385852, - "acc_stderr": 0.02832032583010591, - "acc_norm": 0.4630225080385852, - "acc_norm_stderr": 0.02832032583010591 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.40358744394618834, - "acc_stderr": 0.03292802819330314, - "acc_norm": 0.40358744394618834, - "acc_norm_stderr": 0.03292802819330314 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5656565656565656, - "acc_stderr": 0.03531505879359183, - "acc_norm": 0.5656565656565656, - "acc_norm_stderr": 0.03531505879359183 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.5103448275862069, - "acc_stderr": 0.04165774775728762, - "acc_norm": 0.5103448275862069, - "acc_norm_stderr": 0.04165774775728762 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.04440521906179327, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.04440521906179327 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.032252942323996406, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.032252942323996406 - }, - 
"harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.025141801511177498, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.025141801511177498 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760627, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - "acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.02827241018621491, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.02827241018621491 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6367521367521367, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.6367521367521367, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.5056603773584906, - "acc_stderr": 0.03077090076385131, - "acc_norm": 0.5056603773584906, - "acc_norm_stderr": 0.03077090076385131 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.027634907264178544, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.027634907264178544 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4277456647398844, - "acc_stderr": 0.03772446857518028, - "acc_norm": 0.4277456647398844, - "acc_norm_stderr": 0.03772446857518028 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.024278568024307688, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.024278568024307688 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.375, - "acc_stderr": 0.04048439222695598, - "acc_norm": 0.375, - "acc_norm_stderr": 0.04048439222695598 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.523121387283237, - "acc_stderr": 0.026890297881303118, - "acc_norm": 0.523121387283237, - "acc_norm_stderr": 0.026890297881303118 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.038367409078310294, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.038367409078310294 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.404320987654321, - "acc_stderr": 
0.027306625297327684, - "acc_norm": 0.404320987654321, - "acc_norm_stderr": 0.027306625297327684 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5181347150259067, - "acc_stderr": 0.036060650018329185, - "acc_norm": 0.5181347150259067, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48990825688073397, - "acc_stderr": 0.021432956203453316, - "acc_norm": 0.48990825688073397, - "acc_norm_stderr": 0.021432956203453316 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.38095238095238093, - "acc_stderr": 0.04343525428949097, - "acc_norm": 0.38095238095238093, - "acc_norm_stderr": 0.04343525428949097 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46078431372549017, - "acc_stderr": 0.028541722692618874, - "acc_norm": 0.46078431372549017, - "acc_norm_stderr": 0.028541722692618874 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237103, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237103 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4276315789473684, - "acc_stderr": 0.04026097083296558, - "acc_norm": 0.4276315789473684, - "acc_norm_stderr": 0.04026097083296558 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.019393058402355442, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.019393058402355442 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.41964285714285715, - "acc_stderr": 0.046840993210771065, - "acc_norm": 0.41964285714285715, - "acc_norm_stderr": 0.046840993210771065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27932960893854747, - "acc_stderr": 0.015005762446786154, - "acc_norm": 0.27932960893854747, - "acc_norm_stderr": 0.015005762446786154 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.029935342707877746, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.029935342707877746 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4857142857142857, - "acc_stderr": 0.03199615232806287, - "acc_norm": 0.4857142857142857, - "acc_norm_stderr": 0.03199615232806287 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - 
"acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2985658409387223, - "acc_stderr": 0.011688060141794224, - "acc_norm": 0.2985658409387223, - "acc_norm_stderr": 0.011688060141794224 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.037937131711656344, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.037937131711656344 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842881, - "mc2": 0.4342691202696536, - "mc2_stderr": 0.015037727340783071 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.23708920187793428, - "acc_stderr": 0.014579008468781515, - "acc_norm": 0.3004694835680751, - "acc_norm_stderr": 0.015715881218015145 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - 
"harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-11B-Testbench-2", - "model_sha": "50c90dfe257d5c5ad4c3c6a1fb29f6a5066c085a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json deleted file mode 100644 index 265881eedff28cb77f05b7317de7d747fbf1f411..0000000000000000000000000000000000000000 --- a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3455631399317406, - "acc_stderr": 0.013896938461145685, - "acc_norm": 0.4197952218430034, - "acc_norm_stderr": 0.01442218122630302 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37731527584146585, - "acc_stderr": 0.00483724201519111, - "acc_norm": 0.48775144393547104, - "acc_norm_stderr": 0.0049882839816310495 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4854368932038835, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.4854368932038835, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49680715197956576, - "acc_stderr": 0.01787959894593307, - "acc_norm": 0.49680715197956576, - "acc_norm_stderr": 0.01787959894593307 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.030976692998534443, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.030976692998534443 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.39156626506024095, - "acc_stderr": 0.037998574544796354, - "acc_norm": 0.39156626506024095, - "acc_norm_stderr": 0.037998574544796354 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4630225080385852, - "acc_stderr": 0.02832032583010591, - "acc_norm": 0.4630225080385852, - "acc_norm_stderr": 0.02832032583010591 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.40358744394618834, - "acc_stderr": 0.03292802819330314, - "acc_norm": 0.40358744394618834, - "acc_norm_stderr": 0.03292802819330314 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620333, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620333 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5656565656565656, - "acc_stderr": 0.03531505879359183, - "acc_norm": 0.5656565656565656, - "acc_norm_stderr": 0.03531505879359183 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.5103448275862069, - 
"acc_stderr": 0.04165774775728762, - "acc_norm": 0.5103448275862069, - "acc_norm_stderr": 0.04165774775728762 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.04440521906179327, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.04440521906179327 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.032252942323996406, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.032252942323996406 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4358974358974359, - "acc_stderr": 0.025141801511177498, - "acc_norm": 0.4358974358974359, - "acc_norm_stderr": 0.025141801511177498 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.04820403072760627, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.04820403072760627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - "acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.02827241018621491, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.02827241018621491 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6367521367521367, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.6367521367521367, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.5056603773584906, - "acc_stderr": 0.03077090076385131, - "acc_norm": 0.5056603773584906, - "acc_norm_stderr": 0.03077090076385131 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.027634907264178544, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.027634907264178544 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4277456647398844, - "acc_stderr": 0.03772446857518028, - "acc_norm": 0.4277456647398844, - "acc_norm_stderr": 0.03772446857518028 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.024278568024307688, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.024278568024307688 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.375, - "acc_stderr": 0.04048439222695598, - "acc_norm": 0.375, - "acc_norm_stderr": 0.04048439222695598 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - 
"acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.523121387283237, - "acc_stderr": 0.026890297881303118, - "acc_norm": 0.523121387283237, - "acc_norm_stderr": 0.026890297881303118 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.038367409078310294, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.038367409078310294 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.404320987654321, - "acc_stderr": 0.027306625297327684, - "acc_norm": 0.404320987654321, - "acc_norm_stderr": 0.027306625297327684 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5181347150259067, - "acc_stderr": 0.036060650018329185, - "acc_norm": 0.5181347150259067, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48990825688073397, - "acc_stderr": 0.021432956203453316, - "acc_norm": 0.48990825688073397, - "acc_norm_stderr": 0.021432956203453316 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.38095238095238093, - "acc_stderr": 0.04343525428949097, - "acc_norm": 0.38095238095238093, - "acc_norm_stderr": 0.04343525428949097 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46078431372549017, - "acc_stderr": 0.028541722692618874, - "acc_norm": 0.46078431372549017, - "acc_norm_stderr": 0.028541722692618874 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237103, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237103 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4276315789473684, - "acc_stderr": 0.04026097083296558, - "acc_norm": 0.4276315789473684, - "acc_norm_stderr": 0.04026097083296558 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.019393058402355442, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.019393058402355442 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.41964285714285715, - "acc_stderr": 0.046840993210771065, - "acc_norm": 0.41964285714285715, - "acc_norm_stderr": 0.046840993210771065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27932960893854747, - "acc_stderr": 0.015005762446786154, - "acc_norm": 0.27932960893854747, - "acc_norm_stderr": 0.015005762446786154 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 
0.05024183937956912 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.029935342707877746, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.029935342707877746 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4857142857142857, - "acc_stderr": 0.03199615232806287, - "acc_norm": 0.4857142857142857, - "acc_norm_stderr": 0.03199615232806287 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5527426160337553, - "acc_stderr": 0.03236564251614192, - "acc_norm": 0.5527426160337553, - "acc_norm_stderr": 0.03236564251614192 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2985658409387223, - "acc_stderr": 0.011688060141794224, - "acc_norm": 0.2985658409387223, - "acc_norm_stderr": 0.011688060141794224 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03410785338904719, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03410785338904719 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.037937131711656344, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.037937131711656344 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27050183598531213, - "mc1_stderr": 0.015550778332842881, - "mc2": 0.4342691202696536, - "mc2_stderr": 0.015037727340783071 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.23708920187793428, - "acc_stderr": 0.014579008468781515, - "acc_norm": 0.3004694835680751, - "acc_norm_stderr": 0.015715881218015145 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - 
"harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-11B-Testbench-2", - "model_sha": "50c90dfe257d5c5ad4c3c6a1fb29f6a5066c085a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json b/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json deleted file mode 100644 index 70ba48d57dd1df6056b88514b70dbada063cb03c..0000000000000000000000000000000000000000 --- a/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3378839590443686, - "acc_stderr": 0.013822047922283509, - "acc_norm": 0.3856655290102389, - "acc_norm_stderr": 0.014224250973257177 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37024497112129057, - "acc_stderr": 0.004818833521340358, - "acc_norm": 0.4742083250348536, - "acc_norm_stderr": 0.00498313847960438 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5263157894736842, - "acc_stderr": 0.03829509868994727, - "acc_norm": 0.5263157894736842, - "acc_norm_stderr": 0.03829509868994727 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5019157088122606, - "acc_stderr": 0.017879832259026677, - "acc_norm": 0.5019157088122606, - "acc_norm_stderr": 0.017879832259026677 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4, - "acc_stderr": 0.04232073695151589, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04232073695151589 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33617021276595743, - "acc_stderr": 0.03088161852067694, - "acc_norm": 0.33617021276595743, - "acc_norm_stderr": 0.03088161852067694 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4437299035369775, - "acc_stderr": 0.02821768355665232, - "acc_norm": 0.4437299035369775, - "acc_norm_stderr": 0.02821768355665232 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4080717488789238, - "acc_stderr": 0.03298574607842822, - "acc_norm": 0.4080717488789238, - "acc_norm_stderr": 0.03298574607842822 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4122137404580153, - "acc_stderr": 0.04317171194870255, - "acc_norm": 0.4122137404580153, - "acc_norm_stderr": 0.04317171194870255 - }, 
- "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.47, - "acc_stderr": 0.05016135580465919, - "acc_norm": 0.47, - "acc_norm_stderr": 0.05016135580465919 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.035441324919479704, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.035441324919479704 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.5103448275862069, - "acc_stderr": 0.04165774775728763, - "acc_norm": 0.5103448275862069, - "acc_norm_stderr": 0.04165774775728763 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42436974789915966, - "acc_stderr": 0.032104790510157764, - "acc_norm": 0.42436974789915966, - "acc_norm_stderr": 0.032104790510157764 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4307692307692308, - "acc_stderr": 0.02510682066053975, - "acc_norm": 0.4307692307692308, - "acc_norm_stderr": 0.02510682066053975 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.52, - "acc_stderr": 0.05021167315686779, - "acc_norm": 0.52, - "acc_norm_stderr": 0.05021167315686779 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3842364532019704, - "acc_stderr": 0.0342239856565755, - "acc_norm": 0.3842364532019704, - "acc_norm_stderr": 0.0342239856565755 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.42258064516129035, - "acc_stderr": 0.02810096472427264, - "acc_norm": 0.42258064516129035, - "acc_norm_stderr": 0.02810096472427264 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6623931623931624, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.6623931623931624, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4679245283018868, - "acc_stderr": 0.03070948699255655, - "acc_norm": 0.4679245283018868, - "acc_norm_stderr": 0.03070948699255655 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.02840653309060846, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.02840653309060846 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.41040462427745666, - "acc_stderr": 0.03750757044895538, - "acc_norm": 0.41040462427745666, - "acc_norm_stderr": 0.03750757044895538 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.32275132275132273, - "acc_stderr": 0.024078943243597016, - "acc_norm": 0.32275132275132273, - "acc_norm_stderr": 0.024078943243597016 - }, - 
"harness|ko_mmlu_college_biology|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.040166600304512336, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.040166600304512336 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4884393063583815, - "acc_stderr": 0.026911898686377913, - "acc_norm": 0.4884393063583815, - "acc_norm_stderr": 0.026911898686377913 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4110429447852761, - "acc_stderr": 0.038656978537853624, - "acc_norm": 0.4110429447852761, - "acc_norm_stderr": 0.038656978537853624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.027339546640662727, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.027339546640662727 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46632124352331605, - "acc_stderr": 0.036002440698671784, - "acc_norm": 0.46632124352331605, - "acc_norm_stderr": 0.036002440698671784 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.46055045871559636, - "acc_stderr": 0.021370494609995093, - "acc_norm": 0.46055045871559636, - "acc_norm_stderr": 0.021370494609995093 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04216370213557835, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04216370213557835 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.43790849673202614, - "acc_stderr": 0.02840830202033269, - "acc_norm": 0.43790849673202614, - "acc_norm_stderr": 0.02840830202033269 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4144736842105263, - "acc_stderr": 0.04008973785779207, - "acc_norm": 0.4144736842105263, - "acc_norm_stderr": 0.04008973785779207 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3464052287581699, - "acc_stderr": 0.01924978569171721, - "acc_norm": 0.3464052287581699, - "acc_norm_stderr": 0.01924978569171721 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.028267657482650154, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.028267657482650154 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.44642857142857145, - "acc_stderr": 0.047184714852195886, - "acc_norm": 0.44642857142857145, - "acc_norm_stderr": 0.047184714852195886 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3287037037037037, - "acc_stderr": 0.03203614084670058, - "acc_norm": 0.3287037037037037, - "acc_norm_stderr": 0.03203614084670058 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 
0.2748603351955307, - "acc_stderr": 0.01493131670322051, - "acc_norm": 0.2748603351955307, - "acc_norm_stderr": 0.01493131670322051 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.029520095697687758, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.029520095697687758 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46938775510204084, - "acc_stderr": 0.031949171367580624, - "acc_norm": 0.46938775510204084, - "acc_norm_stderr": 0.031949171367580624 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5189873417721519, - "acc_stderr": 0.03252375148090447, - "acc_norm": 0.5189873417721519, - "acc_norm_stderr": 0.03252375148090447 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29986962190352023, - "acc_stderr": 0.011702660860193986, - "acc_norm": 0.29986962190352023, - "acc_norm_stderr": 0.011702660860193986 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.034542365853806094, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.034542365853806094 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3878787878787879, - "acc_stderr": 0.038049136539710114, - "acc_norm": 0.3878787878787879, - "acc_norm_stderr": 0.038049136539710114 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.015659605755326902, - "mc2": 0.4475458217061865, - "mc2_stderr": 0.015253457911461817 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.23826291079812206, - "acc_stderr": 0.014603803898011351, - "acc_norm": 0.30164319248826293, - "acc_norm_stderr": 0.015733330645500608 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - 
"harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-11B-Testbench", - "model_sha": "9399ea6c2a1d955e31d6b4d68b2b86115aea0e59", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json b/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json deleted file mode 100644 index 3f3d687830a0ff7fdf06f495445b98e84bcec321..0000000000000000000000000000000000000000 --- a/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3626279863481229, - "acc_stderr": 0.014049106564955003, - "acc_norm": 0.4180887372013652, - "acc_norm_stderr": 0.014413988396996084 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38149770961959767, - "acc_stderr": 0.00484761521647345, - "acc_norm": 0.49352718581955785, - "acc_norm_stderr": 0.004989363276955168 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47953216374269003, - "acc_stderr": 0.038316105328219316, - "acc_norm": 0.47953216374269003, - "acc_norm_stderr": 0.038316105328219316 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4854368932038835, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.4854368932038835, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.5070242656449553, - "acc_stderr": 0.017878199003432214, - "acc_norm": 0.5070242656449553, - "acc_norm_stderr": 0.017878199003432214 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.43703703703703706, - "acc_stderr": 0.04284958639753399, - "acc_norm": 0.43703703703703706, - "acc_norm_stderr": 0.04284958639753399 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.39574468085106385, - "acc_stderr": 0.03196758697835363, - "acc_norm": 0.39574468085106385, - "acc_norm_stderr": 0.03196758697835363 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3795180722891566, - "acc_stderr": 0.037777988227480165, - "acc_norm": 0.3795180722891566, - "acc_norm_stderr": 0.037777988227480165 - }, - "harness|ko_mmlu_philosophy|5": 
{ - "acc": 0.4790996784565916, - "acc_stderr": 0.028373270961069414, - "acc_norm": 0.4790996784565916, - "acc_norm_stderr": 0.028373270961069414 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4260089686098655, - "acc_stderr": 0.03318833286217281, - "acc_norm": 0.4260089686098655, - "acc_norm_stderr": 0.03318833286217281 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48854961832061067, - "acc_stderr": 0.043841400240780176, - "acc_norm": 0.48854961832061067, - "acc_norm_stderr": 0.043841400240780176 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5606060606060606, - "acc_stderr": 0.035360859475294805, - "acc_norm": 0.5606060606060606, - "acc_norm_stderr": 0.035360859475294805 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4896551724137931, - "acc_stderr": 0.04165774775728763, - "acc_norm": 0.4896551724137931, - "acc_norm_stderr": 0.04165774775728763 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.042801058373643945, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.042801058373643945 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.032252942323996406, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.032252942323996406 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.45384615384615384, - "acc_stderr": 0.02524277098712617, - "acc_norm": 0.45384615384615384, - "acc_norm_stderr": 0.02524277098712617 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.58, - "acc_stderr": 0.04960449637488583, - "acc_norm": 0.58, - "acc_norm_stderr": 0.04960449637488583 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3793103448275862, - "acc_stderr": 0.03413963805906235, - "acc_norm": 0.3793103448275862, - "acc_norm_stderr": 0.03413963805906235 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.02827241018621491, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.02827241018621491 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6666666666666666, - "acc_stderr": 0.030882736974138656, - "acc_norm": 0.6666666666666666, - "acc_norm_stderr": 0.030882736974138656 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.5056603773584906, - "acc_stderr": 0.03077090076385131, - "acc_norm": 0.5056603773584906, - "acc_norm_stderr": 0.03077090076385131 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.04769300568972743, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.04769300568972743 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.028133252578815635, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.028133252578815635 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 
0.5870646766169154, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.5870646766169154, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4393063583815029, - "acc_stderr": 0.03784271932887468, - "acc_norm": 0.4393063583815029, - "acc_norm_stderr": 0.03784271932887468 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.34656084656084657, - "acc_stderr": 0.024508777521028414, - "acc_norm": 0.34656084656084657, - "acc_norm_stderr": 0.024508777521028414 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3541666666666667, - "acc_stderr": 0.039994111357535424, - "acc_norm": 0.3541666666666667, - "acc_norm_stderr": 0.039994111357535424 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.6, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.6, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5086705202312138, - "acc_stderr": 0.026915047355369804, - "acc_norm": 0.5086705202312138, - "acc_norm_stderr": 0.026915047355369804 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.02774431344337654, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.02774431344337654 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5077720207253886, - "acc_stderr": 0.036080032255696545, - "acc_norm": 0.5077720207253886, - "acc_norm_stderr": 0.036080032255696545 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.042663394431593955, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.042663394431593955 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5100917431192661, - "acc_stderr": 0.021432956203453316, - "acc_norm": 0.5100917431192661, - "acc_norm_stderr": 0.021432956203453316 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3968253968253968, - "acc_stderr": 0.04375888492727062, - "acc_norm": 0.3968253968253968, - "acc_norm_stderr": 0.04375888492727062 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4738562091503268, - "acc_stderr": 0.028590752958852394, - "acc_norm": 0.4738562091503268, - "acc_norm_stderr": 0.028590752958852394 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6528925619834711, - "acc_stderr": 0.04345724570292534, - "acc_norm": 0.6528925619834711, - "acc_norm_stderr": 0.04345724570292534 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.04017901275981748, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.04017901275981748 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39052287581699346, - "acc_stderr": 0.019737008998094593, - "acc_norm": 0.39052287581699346, - "acc_norm_stderr": 0.019737008998094593 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 
0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.36607142857142855, - "acc_stderr": 0.0457237235873743, - "acc_norm": 0.36607142857142855, - "acc_norm_stderr": 0.0457237235873743 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.033384734032074016, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.033384734032074016 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.3139664804469274, - "acc_stderr": 0.015521923933523635, - "acc_norm": 0.3139664804469274, - "acc_norm_stderr": 0.015521923933523635 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4227941176470588, - "acc_stderr": 0.03000856284500348, - "acc_norm": 0.4227941176470588, - "acc_norm_stderr": 0.03000856284500348 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5346938775510204, - "acc_stderr": 0.03193207024425314, - "acc_norm": 0.5346938775510204, - "acc_norm_stderr": 0.03193207024425314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5991561181434599, - "acc_stderr": 0.031900803894732356, - "acc_norm": 0.5991561181434599, - "acc_norm_stderr": 0.031900803894732356 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3050847457627119, - "acc_stderr": 0.01175993961808546, - "acc_norm": 0.3050847457627119, - "acc_norm_stderr": 0.01175993961808546 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4803921568627451, - "acc_stderr": 0.03506612560524866, - "acc_norm": 0.4803921568627451, - "acc_norm_stderr": 0.03506612560524866 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5212121212121212, - "acc_stderr": 0.03900828913737302, - "acc_norm": 0.5212121212121212, - "acc_norm_stderr": 0.03900828913737302 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384304, - "mc2": 0.4577444189927008, - "mc2_stderr": 0.015214396697030213 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.37089201877934275, - "acc_stderr": 0.016558521692487345, - "acc_norm": 0.4295774647887324, - "acc_norm_stderr": 0.01696892392010678 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - 
"harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-7B-Instruct-v0.2", - "model_sha": "b5b306a3004b2781b4f1bd5dd6807478305d0ae7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json b/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json deleted file mode 100644 index ab5b310e2143cbc969954fbd24434b9baa608207..0000000000000000000000000000000000000000 --- a/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3609215017064846, - "acc_stderr": 0.014034761386175452, - "acc_norm": 0.41723549488054607, - "acc_norm_stderr": 0.01440982551840308 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3813981278629755, - "acc_stderr": 0.004847372670134637, - "acc_norm": 0.49283011352320255, - "acc_norm_stderr": 0.004989268362968721 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.04931801994220416, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.04931801994220416 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.46871008939974457, - "acc_stderr": 0.017844918090468544, - "acc_norm": 0.46871008939974457, - "acc_norm_stderr": 0.017844918090468544 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 
0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4297872340425532, - "acc_stderr": 0.03236214467715564, - "acc_norm": 0.4297872340425532, - "acc_norm_stderr": 0.03236214467715564 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3855421686746988, - "acc_stderr": 0.03789134424611548, - "acc_norm": 0.3855421686746988, - "acc_norm_stderr": 0.03789134424611548 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4855305466237942, - "acc_stderr": 0.02838619808417768, - "acc_norm": 0.4855305466237942, - "acc_norm_stderr": 0.02838619808417768 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4484304932735426, - "acc_stderr": 0.03337883736255098, - "acc_norm": 0.4484304932735426, - "acc_norm_stderr": 0.03337883736255098 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.0435644720266507, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.0435644720266507 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.0354413249194797, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.0354413249194797 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.496551724137931, - "acc_stderr": 0.04166567577101579, - "acc_norm": 0.496551724137931, - "acc_norm_stderr": 0.04166567577101579 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.046550104113196177, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.046550104113196177 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.032145368597886394, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.032145368597886394 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.41794871794871796, - "acc_stderr": 0.025007329882461224, - "acc_norm": 0.41794871794871796, - "acc_norm_stderr": 0.025007329882461224 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.57, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.57, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542129, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542129 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.03481904844438804, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.03481904844438804 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.43870967741935485, - "acc_stderr": 0.02822949732031722, - "acc_norm": 0.43870967741935485, - "acc_norm_stderr": 0.02822949732031722 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6495726495726496, - "acc_stderr": 0.0312561082442188, - "acc_norm": 0.6495726495726496, - "acc_norm_stderr": 0.0312561082442188 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.49056603773584906, - "acc_stderr": 0.030767394707808086, - "acc_norm": 0.49056603773584906, - "acc_norm_stderr": 0.030767394707808086 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.43636363636363634, - "acc_stderr": 0.04750185058907297, - "acc_norm": 
0.43636363636363634, - "acc_norm_stderr": 0.04750185058907297 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833713, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833713 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3509933774834437, - "acc_stderr": 0.03896981964257375, - "acc_norm": 0.3509933774834437, - "acc_norm_stderr": 0.03896981964257375 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5572139303482587, - "acc_stderr": 0.03512310964123935, - "acc_norm": 0.5572139303482587, - "acc_norm_stderr": 0.03512310964123935 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.03714325906302065, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.03714325906302065 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3386243386243386, - "acc_stderr": 0.024373197867983067, - "acc_norm": 0.3386243386243386, - "acc_norm_stderr": 0.024373197867983067 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03942082639927213, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03942082639927213 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.48265895953757226, - "acc_stderr": 0.02690290045866664, - "acc_norm": 0.48265895953757226, - "acc_norm_stderr": 0.02690290045866664 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4049079754601227, - "acc_stderr": 0.038566721635489125, - "acc_norm": 0.4049079754601227, - "acc_norm_stderr": 0.038566721635489125 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.44753086419753085, - "acc_stderr": 0.027667138569422697, - "acc_norm": 0.44753086419753085, - "acc_norm_stderr": 0.027667138569422697 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.42, - "acc_stderr": 0.04960449637488583, - "acc_norm": 0.42, - "acc_norm_stderr": 0.04960449637488583 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.49740932642487046, - "acc_stderr": 0.03608390745384486, - "acc_norm": 0.49740932642487046, - "acc_norm_stderr": 0.03608390745384486 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.043036840335373173, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.043036840335373173 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47522935779816516, - "acc_stderr": 0.02141099975363592, - "acc_norm": 0.47522935779816516, - "acc_norm_stderr": 0.02141099975363592 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04285714285714281, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04285714285714281 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.47058823529411764, - "acc_stderr": 0.028580341065138293, - "acc_norm": 0.47058823529411764, - "acc_norm_stderr": 0.028580341065138293 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 
0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.04017901275981749, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.04017901275981749 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.35130718954248363, - "acc_stderr": 0.01931267606578656, - "acc_norm": 0.35130718954248363, - "acc_norm_stderr": 0.01931267606578656 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2759776536312849, - "acc_stderr": 0.014950103002475363, - "acc_norm": 0.2759776536312849, - "acc_norm_stderr": 0.014950103002475363 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.56, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.56, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3602941176470588, - "acc_stderr": 0.02916312857067073, - "acc_norm": 0.3602941176470588, - "acc_norm_stderr": 0.02916312857067073 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5142857142857142, - "acc_stderr": 0.03199615232806286, - "acc_norm": 0.5142857142857142, - "acc_norm_stderr": 0.03199615232806286 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5780590717299579, - "acc_stderr": 0.032148146302403695, - "acc_norm": 0.5780590717299579, - "acc_norm_stderr": 0.032148146302403695 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3135593220338983, - "acc_stderr": 0.011849234291459329, - "acc_norm": 0.3135593220338983, - "acc_norm_stderr": 0.011849234291459329 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.03484941514429231, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.03484941514429231 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.03902551007374448, - "acc_norm": 0.48484848484848486, - "acc_norm_stderr": 0.03902551007374448 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2607099143206854, - "mc1_stderr": 0.015368841620766373, - "mc2": 0.43748297535795655, - "mc2_stderr": 0.015378495166878805 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3086854460093897, - "acc_stderr": 0.015835476890718975, - "acc_norm": 0.3931924882629108, - "acc_norm_stderr": 0.01674415749294927 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - 
"harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra-V0.1-7B-Instruct", - "model_sha": "4d37e91f047ef6e981e0817d43ba0d7fac52582b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json b/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json deleted file mode 100644 index 11e4774137c8e409bb0d7cb1192b53d8b430e577..0000000000000000000000000000000000000000 --- a/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3097269624573379, - "acc_stderr": 0.01351205841523836, - "acc_norm": 0.38310580204778155, - "acc_norm_stderr": 0.014206472661672877 - }, - "harness|ko_hellaswag|10": { - "acc": 0.35331607249551883, - "acc_stderr": 0.004770229206838901, - "acc_norm": 0.4451304521011751, - "acc_norm_stderr": 0.004959645263390238 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4678362573099415, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.4678362573099415, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.46601941747572817, - "acc_stderr": 0.0493929144727348, - 
"acc_norm": 0.46601941747572817, - "acc_norm_stderr": 0.0493929144727348 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49169859514687103, - "acc_stderr": 0.017877498991072008, - "acc_norm": 0.49169859514687103, - "acc_norm_stderr": 0.017877498991072008 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4, - "acc_stderr": 0.03202563076101735, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03202563076101735 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3674698795180723, - "acc_stderr": 0.03753267402120574, - "acc_norm": 0.3674698795180723, - "acc_norm_stderr": 0.03753267402120574 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5080385852090032, - "acc_stderr": 0.028394421370984538, - "acc_norm": 0.5080385852090032, - "acc_norm_stderr": 0.028394421370984538 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.42152466367713004, - "acc_stderr": 0.03314190222110658, - "acc_norm": 0.42152466367713004, - "acc_norm_stderr": 0.03314190222110658 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5038167938931297, - "acc_stderr": 0.04385162325601553, - "acc_norm": 0.5038167938931297, - "acc_norm_stderr": 0.04385162325601553 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5757575757575758, - "acc_stderr": 0.03521224908841586, - "acc_norm": 0.5757575757575758, - "acc_norm_stderr": 0.03521224908841586 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4827586206896552, - "acc_stderr": 0.04164188720169377, - "acc_norm": 0.4827586206896552, - "acc_norm_stderr": 0.04164188720169377 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04690650298201942, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04690650298201942 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.03214536859788639, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.03214536859788639 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.45897435897435895, - "acc_stderr": 0.025265525491284295, - "acc_norm": 0.45897435897435895, - "acc_norm_stderr": 0.025265525491284295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.55, - "acc_stderr": 0.04999999999999999, - "acc_norm": 0.55, - "acc_norm_stderr": 0.04999999999999999 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - "acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44193548387096776, - "acc_stderr": 0.02825155790684974, - "acc_norm": 0.44193548387096776, - "acc_norm_stderr": 0.02825155790684974 - }, - 
"harness|ko_mmlu_marketing|5": { - "acc": 0.6965811965811965, - "acc_stderr": 0.03011821010694265, - "acc_norm": 0.6965811965811965, - "acc_norm_stderr": 0.03011821010694265 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.5132075471698113, - "acc_stderr": 0.030762134874500476, - "acc_norm": 0.5132075471698113, - "acc_norm_stderr": 0.030762134874500476 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.04769300568972743, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.04769300568972743 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.02803792996911499, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.02803792996911499 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5323383084577115, - "acc_stderr": 0.03528131472933607, - "acc_norm": 0.5323383084577115, - "acc_norm_stderr": 0.03528131472933607 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4161849710982659, - "acc_stderr": 0.037585177754049466, - "acc_norm": 0.4161849710982659, - "acc_norm_stderr": 0.037585177754049466 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.023973861998992062, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.023973861998992062 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237101, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237101 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.026917296179149116, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.026917296179149116 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.39263803680981596, - "acc_stderr": 0.03836740907831029, - "acc_norm": 0.39263803680981596, - "acc_norm_stderr": 0.03836740907831029 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.45987654320987653, - "acc_stderr": 0.027731022753539274, - "acc_norm": 0.45987654320987653, - "acc_norm_stderr": 0.027731022753539274 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.538860103626943, - "acc_stderr": 0.035975244117345775, - "acc_norm": 0.538860103626943, - "acc_norm_stderr": 0.035975244117345775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.041857744240220575, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.041857744240220575 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.5064220183486239, - "acc_stderr": 0.021435554820013077, - "acc_norm": 0.5064220183486239, - "acc_norm_stderr": 0.021435554820013077 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.38095238095238093, - "acc_stderr": 0.043435254289490965, - "acc_norm": 0.38095238095238093, - "acc_norm_stderr": 0.043435254289490965 - }, - 
"harness|ko_mmlu_nutrition|5": { - "acc": 0.5, - "acc_stderr": 0.028629916715693413, - "acc_norm": 0.5, - "acc_norm_stderr": 0.028629916715693413 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4276315789473684, - "acc_stderr": 0.040260970832965585, - "acc_norm": 0.4276315789473684, - "acc_norm_stderr": 0.040260970832965585 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.01950629169395486, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.01950629169395486 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4017857142857143, - "acc_stderr": 0.04653333146973646, - "acc_norm": 0.4017857142857143, - "acc_norm_stderr": 0.04653333146973646 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4398148148148148, - "acc_stderr": 0.033851779760448106, - "acc_norm": 0.4398148148148148, - "acc_norm_stderr": 0.033851779760448106 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.3039106145251397, - "acc_stderr": 0.01538284558758452, - "acc_norm": 0.3039106145251397, - "acc_norm_stderr": 0.01538284558758452 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4338235294117647, - "acc_stderr": 0.030105636570016636, - "acc_norm": 0.4338235294117647, - "acc_norm_stderr": 0.030105636570016636 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5346938775510204, - "acc_stderr": 0.03193207024425314, - "acc_norm": 0.5346938775510204, - "acc_norm_stderr": 0.03193207024425314 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5864978902953587, - "acc_stderr": 0.03205649904851858, - "acc_norm": 0.5864978902953587, - "acc_norm_stderr": 0.03205649904851858 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3011734028683181, - "acc_stderr": 0.01171714875164844, - "acc_norm": 0.3011734028683181, - "acc_norm_stderr": 0.01171714875164844 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4215686274509804, - "acc_stderr": 0.03465868196380757, - "acc_norm": 0.4215686274509804, - "acc_norm_stderr": 0.03465868196380757 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0390369864774844, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0390369864774844 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.33047735618115054, - "mc1_stderr": 0.016466769613698293, - "mc2": 0.5058685155948915, - "mc2_stderr": 0.01583111147395693 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.29225352112676056, - "acc_stderr": 0.015590281423747502, - "acc_norm": 0.36032863849765256, - "acc_norm_stderr": 0.01645746969570512 - } - }, - "versions": { - "all": 0, - 
"harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra_TbST02M_IN01", - "model_sha": "370087b45ea10cedcb3c698b2327a8c99d7b7b57", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json b/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json deleted file mode 100644 index 480151aa4c514bec9a5b7e181302dfd92fd78d2f..0000000000000000000000000000000000000000 --- a/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.34812286689419797, - "acc_stderr": 0.013921008595179344, - "acc_norm": 0.40784982935153585, - "acc_norm_stderr": 0.014361097288449691 - }, - 
"harness|ko_hellaswag|10": { - "acc": 0.36875124477195775, - "acc_stderr": 0.0048148030984368085, - "acc_norm": 0.4722166899024099, - "acc_norm_stderr": 0.004982072108448084 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.45614035087719296, - "acc_stderr": 0.03820042586602966, - "acc_norm": 0.45614035087719296, - "acc_norm_stderr": 0.03820042586602966 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5145631067961165, - "acc_stderr": 0.04948637324026637, - "acc_norm": 0.5145631067961165, - "acc_norm_stderr": 0.04948637324026637 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4840357598978289, - "acc_stderr": 0.01787084750608173, - "acc_norm": 0.4840357598978289, - "acc_norm_stderr": 0.01787084750608173 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3795180722891566, - "acc_stderr": 0.03777798822748016, - "acc_norm": 0.3795180722891566, - "acc_norm_stderr": 0.03777798822748016 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5016077170418006, - "acc_stderr": 0.02839794490780661, - "acc_norm": 0.5016077170418006, - "acc_norm_stderr": 0.02839794490780661 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.38565022421524664, - "acc_stderr": 0.03266842214289201, - "acc_norm": 0.38565022421524664, - "acc_norm_stderr": 0.03266842214289201 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4351145038167939, - "acc_stderr": 0.04348208051644858, - "acc_norm": 0.4351145038167939, - "acc_norm_stderr": 0.04348208051644858 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5808080808080808, - "acc_stderr": 0.035155207286704175, - "acc_norm": 0.5808080808080808, - "acc_norm_stderr": 0.035155207286704175 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4827586206896552, - "acc_stderr": 0.04164188720169377, - "acc_norm": 0.4827586206896552, - "acc_norm_stderr": 0.04164188720169377 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.045338381959297736, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.045338381959297736 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.47478991596638653, - "acc_stderr": 0.0324371805513741, - "acc_norm": 0.47478991596638653, - "acc_norm_stderr": 0.0324371805513741 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4461538461538462, - "acc_stderr": 0.025203571773028333, - "acc_norm": 0.4461538461538462, - "acc_norm_stderr": 0.025203571773028333 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 
0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.37438423645320196, - "acc_stderr": 0.03405155380561952, - "acc_norm": 0.37438423645320196, - "acc_norm_stderr": 0.03405155380561952 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.432258064516129, - "acc_stderr": 0.02818173972001941, - "acc_norm": 0.432258064516129, - "acc_norm_stderr": 0.02818173972001941 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6666666666666666, - "acc_stderr": 0.030882736974138653, - "acc_norm": 0.6666666666666666, - "acc_norm_stderr": 0.030882736974138653 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.5094339622641509, - "acc_stderr": 0.030767394707808107, - "acc_norm": 0.5094339622641509, - "acc_norm_stderr": 0.030767394707808107 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4090909090909091, - "acc_stderr": 0.04709306978661896, - "acc_norm": 0.4090909090909091, - "acc_norm_stderr": 0.04709306978661896 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.028133252578815642, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.028133252578815642 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.3443708609271523, - "acc_stderr": 0.03879687024073327, - "acc_norm": 0.3443708609271523, - "acc_norm_stderr": 0.03879687024073327 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6069651741293532, - "acc_stderr": 0.0345368246603156, - "acc_norm": 0.6069651741293532, - "acc_norm_stderr": 0.0345368246603156 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4393063583815029, - "acc_stderr": 0.037842719328874674, - "acc_norm": 0.4393063583815029, - "acc_norm_stderr": 0.037842719328874674 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3544973544973545, - "acc_stderr": 0.024636830602841997, - "acc_norm": 0.3544973544973545, - "acc_norm_stderr": 0.024636830602841997 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.55, - "acc_stderr": 0.05, - "acc_norm": 0.55, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4884393063583815, - "acc_stderr": 0.026911898686377913, - "acc_norm": 0.4884393063583815, - "acc_norm_stderr": 0.026911898686377913 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.44171779141104295, - "acc_stderr": 0.039015918258361836, - "acc_norm": 0.44171779141104295, - "acc_norm_stderr": 0.039015918258361836 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.43209876543209874, - "acc_stderr": 0.02756301097160668, - "acc_norm": 0.43209876543209874, - "acc_norm_stderr": 0.02756301097160668 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.47668393782383417, - "acc_stderr": 0.03604513672442205, - "acc_norm": 0.47668393782383417, - "acc_norm_stderr": 0.03604513672442205 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537317, - "acc_norm": 
0.2982456140350877, - "acc_norm_stderr": 0.04303684033537317 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.48807339449541287, - "acc_stderr": 0.021431223617362223, - "acc_norm": 0.48807339449541287, - "acc_norm_stderr": 0.021431223617362223 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.04073524322147124, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.04073524322147124 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4673202614379085, - "acc_stderr": 0.02856869975222588, - "acc_norm": 0.4673202614379085, - "acc_norm_stderr": 0.02856869975222588 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.628099173553719, - "acc_stderr": 0.04412015806624504, - "acc_norm": 0.628099173553719, - "acc_norm_stderr": 0.04412015806624504 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4342105263157895, - "acc_stderr": 0.040335656678483184, - "acc_norm": 0.4342105263157895, - "acc_norm_stderr": 0.040335656678483184 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.019488025745529658, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.019488025745529658 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.32269503546099293, - "acc_stderr": 0.027889139300534792, - "acc_norm": 0.32269503546099293, - "acc_norm_stderr": 0.027889139300534792 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.032757734861009996, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.032757734861009996 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23016759776536314, - "acc_stderr": 0.014078339253425807, - "acc_norm": 0.23016759776536314, - "acc_norm_stderr": 0.014078339253425807 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3860294117647059, - "acc_stderr": 0.029573269134411124, - "acc_norm": 0.3860294117647059, - "acc_norm_stderr": 0.029573269134411124 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4775510204081633, - "acc_stderr": 0.03197694118713673, - "acc_norm": 0.4775510204081633, - "acc_norm_stderr": 0.03197694118713673 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5864978902953587, - "acc_stderr": 0.03205649904851858, - "acc_norm": 0.5864978902953587, - "acc_norm_stderr": 0.03205649904851858 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3155149934810952, - "acc_stderr": 0.011869184843058643, - "acc_norm": 0.3155149934810952, - "acc_norm_stderr": 0.011869184843058643 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.49019607843137253, - "acc_stderr": 0.03508637358630572, - "acc_norm": 0.49019607843137253, - "acc_norm_stderr": 0.03508637358630572 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.03902551007374448, - "acc_norm": 0.5151515151515151, - 
"acc_norm_stderr": 0.03902551007374448 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.015659605755326912, - "mc2": 0.4484601943910918, - "mc2_stderr": 0.015458891626438749 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.35093896713615025, - "acc_stderr": 0.016360395003030395, - "acc_norm": 0.4612676056338028, - "acc_norm_stderr": 0.017088275735102635 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "maywell/Synatra_TbST11B_EP01", - "model_sha": "54fd9892d9189a077506a024602f48d83af7d383", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json b/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json deleted file 
mode 100644 index d151f1ca0749a988537ffaa5f5a264c76fa18b5e..0000000000000000000000000000000000000000 --- a/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2901023890784983, - "acc_stderr": 0.013261573677520769, - "acc_norm": 0.3412969283276451, - "acc_norm_stderr": 0.01385583128749772 - }, - "harness|ko_hellaswag|10": { - "acc": 0.33210515833499304, - "acc_stderr": 0.00470005967137463, - "acc_norm": 0.41585341565425216, - "acc_norm_stderr": 0.004918612098944034 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38011695906432746, - "acc_stderr": 0.03722965741385539, - "acc_norm": 0.38011695906432746, - "acc_norm_stderr": 0.03722965741385539 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5048543689320388, - "acc_stderr": 0.04950504382128921, - "acc_norm": 0.5048543689320388, - "acc_norm_stderr": 0.04950504382128921 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3997445721583653, - "acc_stderr": 0.01751684790705327, - "acc_norm": 0.3997445721583653, - "acc_norm_stderr": 0.01751684790705327 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977111, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977111 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610334, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610334 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.03740059382029319, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.03740059382029319 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4180064308681672, - "acc_stderr": 0.02801365189199507, - "acc_norm": 0.4180064308681672, - "acc_norm_stderr": 0.02801365189199507 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.34080717488789236, - "acc_stderr": 0.03181149747055359, - "acc_norm": 0.34080717488789236, - "acc_norm_stderr": 0.03181149747055359 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.043564472026650695, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.043564472026650695 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5252525252525253, - "acc_stderr": 0.03557806245087314, - "acc_norm": 0.5252525252525253, - "acc_norm_stderr": 0.03557806245087314 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.41379310344827586, - "acc_stderr": 0.041042692118062316, - "acc_norm": 0.41379310344827586, - "acc_norm_stderr": 0.041042692118062316 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04690650298201943, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04690650298201943 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.03233943468182087, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.03233943468182087 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.38974358974358975, - "acc_stderr": 0.024726967886647074, - "acc_norm": 0.38974358974358975, - "acc_norm_stderr": 0.024726967886647074 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.52, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.52, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190192, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190192 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3645320197044335, - "acc_stderr": 0.0338640574606209, - "acc_norm": 0.3645320197044335, - "acc_norm_stderr": 0.0338640574606209 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.38387096774193546, - "acc_stderr": 0.027666182075539635, - "acc_norm": 0.38387096774193546, - "acc_norm_stderr": 0.027666182075539635 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6196581196581197, - "acc_stderr": 0.03180425204384099, - "acc_norm": 0.6196581196581197, - "acc_norm_stderr": 0.03180425204384099 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.37735849056603776, - "acc_stderr": 0.029832808114796005, - "acc_norm": 0.37735849056603776, - "acc_norm_stderr": 0.029832808114796005 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.41818181818181815, - "acc_stderr": 0.047245774057315705, - "acc_norm": 0.41818181818181815, - "acc_norm_stderr": 0.047245774057315705 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3074074074074074, - "acc_stderr": 0.028133252578815642, - "acc_norm": 0.3074074074074074, - "acc_norm_stderr": 0.028133252578815642 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33774834437086093, - "acc_stderr": 0.038615575462551684, - "acc_norm": 0.33774834437086093, - "acc_norm_stderr": 0.038615575462551684 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.527363184079602, - "acc_stderr": 0.035302355173346824, - "acc_norm": 0.527363184079602, - "acc_norm_stderr": 0.035302355173346824 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.036146654241808254, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.036146654241808254 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.02455229220934266, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.02455229220934266 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.026226158605124655, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.026226158605124655 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3619631901840491, - "acc_stderr": 0.037757007291414416, - "acc_norm": 0.3619631901840491, - "acc_norm_stderr": 0.037757007291414416 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.36419753086419754, - "acc_stderr": 0.026774929899722327, - "acc_norm": 0.36419753086419754, - "acc_norm_stderr": 0.026774929899722327 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 
0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.0354150857888402, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.0354150857888402 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159395, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159395 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4055045871559633, - "acc_stderr": 0.02105099799189684, - "acc_norm": 0.4055045871559633, - "acc_norm_stderr": 0.02105099799189684 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.0442626668137991, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.0442626668137991 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.02778014120702333, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.02778014120702333 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5619834710743802, - "acc_stderr": 0.045291468044357915, - "acc_norm": 0.5619834710743802, - "acc_norm_stderr": 0.045291468044357915 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336285, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336285 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3202614379084967, - "acc_stderr": 0.018875682938069443, - "acc_norm": 0.3202614379084967, - "acc_norm_stderr": 0.018875682938069443 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3262411347517731, - "acc_stderr": 0.02796845304356317, - "acc_norm": 0.3262411347517731, - "acc_norm_stderr": 0.02796845304356317 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.44907407407407407, - "acc_stderr": 0.03392238405321617, - "acc_norm": 0.44907407407407407, - "acc_norm_stderr": 0.03392238405321617 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.3106145251396648, - "acc_stderr": 0.015476515438005566, - "acc_norm": 0.3106145251396648, - "acc_norm_stderr": 0.015476515438005566 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.049999999999999996, - "acc_norm": 0.45, - "acc_norm_stderr": 0.049999999999999996 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4338235294117647, - "acc_stderr": 0.030105636570016636, - "acc_norm": 0.4338235294117647, - "acc_norm_stderr": 0.030105636570016636 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.43673469387755104, - "acc_stderr": 0.03175195237583322, - "acc_norm": 0.43673469387755104, - "acc_norm_stderr": 0.03175195237583322 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4767932489451477, - "acc_stderr": 0.032512152011410174, - "acc_norm": 0.4767932489451477, - "acc_norm_stderr": 0.032512152011410174 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28683181225554105, - "acc_stderr": 0.011551504781176933, - 
"acc_norm": 0.28683181225554105, - "acc_norm_stderr": 0.011551504781176933 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.39705882352941174, - "acc_stderr": 0.034341311647191286, - "acc_norm": 0.39705882352941174, - "acc_norm_stderr": 0.034341311647191286 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.34545454545454546, - "acc_stderr": 0.03713158067481913, - "acc_norm": 0.34545454545454546, - "acc_norm_stderr": 0.03713158067481913 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.32313341493268055, - "mc1_stderr": 0.016371836286454614, - "mc2": 0.4992370707389853, - "mc2_stderr": 0.01568220201461622 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2711267605633803, - "acc_stderr": 0.015238682312617987, - "acc_norm": 0.3474178403755869, - "acc_norm_stderr": 0.016322206819108932 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - 
"model_name": "maywell/synatra_V0.01", - "model_sha": "c27df4dbc7624ea0bcbf0b0ff149d49b58713a4e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json b/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json deleted file mode 100644 index b789e64b115660cf983c168971a9c496eee6f0d3..0000000000000000000000000000000000000000 --- a/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2645051194539249, - "acc_stderr": 0.012889272949313368, - "acc_norm": 0.32849829351535836, - "acc_norm_stderr": 0.013724978465537378 - }, - "harness|ko_hellaswag|10": { - "acc": 0.32682732523401714, - "acc_stderr": 0.004680949283855315, - "acc_norm": 0.3868751244771958, - "acc_norm_stderr": 0.004860393011974685 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.38596491228070173, - "acc_stderr": 0.03733756969066163, - "acc_norm": 0.38596491228070173, - "acc_norm_stderr": 0.03733756969066163 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4563106796116505, - "acc_stderr": 0.04931801994220414, - "acc_norm": 0.4563106796116505, - "acc_norm_stderr": 0.04931801994220414 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.38569604086845466, - "acc_stderr": 0.01740647661921291, - "acc_norm": 0.38569604086845466, - "acc_norm_stderr": 0.01740647661921291 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.039725528847851375, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.039725528847851375 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720683, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720683 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.32340425531914896, - "acc_stderr": 0.030579442773610337, - "acc_norm": 0.32340425531914896, - "acc_norm_stderr": 0.030579442773610337 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.036293353299478595, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.036293353299478595 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.41479099678456594, - "acc_stderr": 0.027982680459759563, - "acc_norm": 0.41479099678456594, - "acc_norm_stderr": 0.027982680459759563 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.36771300448430494, - "acc_stderr": 0.03236198350928276, - "acc_norm": 0.36771300448430494, - "acc_norm_stderr": 0.03236198350928276 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3893129770992366, - "acc_stderr": 0.04276486542814591, - "acc_norm": 0.3893129770992366, - "acc_norm_stderr": 0.04276486542814591 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.03540294377095367, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.03540294377095367 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.45517241379310347, - "acc_stderr": 0.04149886942192117, - "acc_norm": 0.45517241379310347, - "acc_norm_stderr": 0.04149886942192117 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.044405219061793254, - "acc_norm": 
0.27450980392156865, - "acc_norm_stderr": 0.044405219061793254 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.39915966386554624, - "acc_stderr": 0.03181110032413926, - "acc_norm": 0.39915966386554624, - "acc_norm_stderr": 0.03181110032413926 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.37435897435897436, - "acc_stderr": 0.024537591572830524, - "acc_norm": 0.37435897435897436, - "acc_norm_stderr": 0.024537591572830524 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.04750077341199985, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.04750077341199985 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.033442837442804574, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.033442837442804574 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3774193548387097, - "acc_stderr": 0.027575960723278253, - "acc_norm": 0.3774193548387097, - "acc_norm_stderr": 0.027575960723278253 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6111111111111112, - "acc_stderr": 0.031937057262002924, - "acc_norm": 0.6111111111111112, - "acc_norm_stderr": 0.031937057262002924 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.43018867924528303, - "acc_stderr": 0.03047144586718323, - "acc_norm": 0.43018867924528303, - "acc_norm_stderr": 0.03047144586718323 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425463, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425463 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.02803792996911499, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.02803792996911499 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.47761194029850745, - "acc_stderr": 0.035319879302087305, - "acc_norm": 0.47761194029850745, - "acc_norm_stderr": 0.035319879302087305 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3699421965317919, - "acc_stderr": 0.03681229633394319, - "acc_norm": 0.3699421965317919, - "acc_norm_stderr": 0.03681229633394319 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.31216931216931215, - "acc_stderr": 0.023865206836972602, - "acc_norm": 0.31216931216931215, - "acc_norm_stderr": 0.023865206836972602 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.02642481659400985, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.02642481659400985 
- }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.37423312883435583, - "acc_stderr": 0.038020681028996146, - "acc_norm": 0.37423312883435583, - "acc_norm_stderr": 0.038020681028996146 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.026105673861409814, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.026105673861409814 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.41968911917098445, - "acc_stderr": 0.03561587327685884, - "acc_norm": 0.41968911917098445, - "acc_norm_stderr": 0.03561587327685884 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3834862385321101, - "acc_stderr": 0.020847156641915984, - "acc_norm": 0.3834862385321101, - "acc_norm_stderr": 0.020847156641915984 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.42857142857142855, - "acc_stderr": 0.0442626668137991, - "acc_norm": 0.42857142857142855, - "acc_norm_stderr": 0.0442626668137991 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4477124183006536, - "acc_stderr": 0.028472938478033526, - "acc_norm": 0.4477124183006536, - "acc_norm_stderr": 0.028472938478033526 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.4, - "acc_stderr": 0.049236596391733084, - "acc_norm": 0.4, - "acc_norm_stderr": 0.049236596391733084 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5371900826446281, - "acc_stderr": 0.04551711196104218, - "acc_norm": 0.5371900826446281, - "acc_norm_stderr": 0.04551711196104218 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351585, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351585 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.01863559403442397, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.01863559403442397 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3262411347517731, - "acc_stderr": 0.027968453043563168, - "acc_norm": 0.3262411347517731, - "acc_norm_stderr": 0.027968453043563168 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.043642261558410445, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.043642261558410445 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.41203703703703703, - "acc_stderr": 0.03356787758160835, - "acc_norm": 0.41203703703703703, - "acc_norm_stderr": 0.03356787758160835 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.31843575418994413, - "acc_stderr": 0.015581008080360274, - "acc_norm": 0.31843575418994413, - "acc_norm_stderr": 0.015581008080360274 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.029520095697687758, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.029520095697687758 - }, - 
"harness|ko_mmlu_security_studies|5": { - "acc": 0.44081632653061226, - "acc_stderr": 0.03178419114175363, - "acc_norm": 0.44081632653061226, - "acc_norm_stderr": 0.03178419114175363 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.47257383966244726, - "acc_stderr": 0.032498227183013026, - "acc_norm": 0.47257383966244726, - "acc_norm_stderr": 0.032498227183013026 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2966101694915254, - "acc_stderr": 0.011665946586082868, - "acc_norm": 0.2966101694915254, - "acc_norm_stderr": 0.011665946586082868 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.03283472056108567, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.03283472056108567 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3515151515151515, - "acc_stderr": 0.0372820699868265, - "acc_norm": 0.3515151515151515, - "acc_norm_stderr": 0.0372820699868265 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3243574051407589, - "mc1_stderr": 0.01638797677964793, - "mc2": 0.49917419306073907, - "mc2_stderr": 0.016202138687957245 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2535211267605634, - "acc_stderr": 0.014912520668056978, - "acc_norm": 0.3075117370892019, - "acc_norm_stderr": 0.015818754158516964 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - 
"harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mistralai/Mistral-7B-Instruct-v0.1", - "model_sha": "7ad5799710574ba1c1d953eba3077af582f3a773", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json b/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json deleted file mode 100644 index 8d11e53b90dfeb0b4acdf248230628173b630f95..0000000000000000000000000000000000000000 --- a/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.33532423208191126, - "acc_stderr": 0.01379618294778556, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668526 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3703445528779128, - "acc_stderr": 0.004819100456867818, - "acc_norm": 0.481876120294762, - "acc_norm_stderr": 0.004986502296931182 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5631067961165048, - "acc_stderr": 0.049111471073657764, - "acc_norm": 0.5631067961165048, - "acc_norm_stderr": 0.049111471073657764 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.46871008939974457, - "acc_stderr": 0.017844918090468544, - "acc_norm": 0.46871008939974457, - "acc_norm_stderr": 0.017844918090468544 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.042561937679014075, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.042561937679014075 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.40425531914893614, - "acc_stderr": 0.03208115750788684, - "acc_norm": 0.40425531914893614, - "acc_norm_stderr": 0.03208115750788684 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.43373493975903615, - "acc_stderr": 0.03858158940685515, - "acc_norm": 0.43373493975903615, - "acc_norm_stderr": 0.03858158940685515 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5112540192926045, - "acc_stderr": 0.028390897396863533, - "acc_norm": 0.5112540192926045, - "acc_norm_stderr": 0.028390897396863533 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4439461883408072, - "acc_stderr": 0.03334625674242728, - "acc_norm": 0.4439461883408072, - "acc_norm_stderr": 0.03334625674242728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5038167938931297, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.5038167938931297, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": 
{ - "acc": 0.5505050505050505, - "acc_stderr": 0.035441324919479704, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.035441324919479704 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.041443118108781506, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.041443118108781506 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364395, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364395 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.542016806722689, - "acc_stderr": 0.03236361111951941, - "acc_norm": 0.542016806722689, - "acc_norm_stderr": 0.03236361111951941 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4794871794871795, - "acc_stderr": 0.025329663163489943, - "acc_norm": 0.4794871794871795, - "acc_norm_stderr": 0.025329663163489943 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562429, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562429 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5277777777777778, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.5277777777777778, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.43842364532019706, - "acc_stderr": 0.03491207857486519, - "acc_norm": 0.43842364532019706, - "acc_norm_stderr": 0.03491207857486519 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4935483870967742, - "acc_stderr": 0.02844163823354051, - "acc_norm": 0.4935483870967742, - "acc_norm_stderr": 0.02844163823354051 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7435897435897436, - "acc_stderr": 0.028605953702004243, - "acc_norm": 0.7435897435897436, - "acc_norm_stderr": 0.028605953702004243 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4339622641509434, - "acc_stderr": 0.030503292013342592, - "acc_norm": 0.4339622641509434, - "acc_norm_stderr": 0.030503292013342592 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0478833976870286, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0478833976870286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.337037037037037, - "acc_stderr": 0.028820884666253252, - "acc_norm": 0.337037037037037, - "acc_norm_stderr": 0.028820884666253252 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5970149253731343, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.5970149253731343, - "acc_norm_stderr": 0.034683432951111266 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.02490699045899257, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.02490699045899257 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - 
"harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.58, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.58, - "acc_norm_stderr": 0.04960449637488584 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.523121387283237, - "acc_stderr": 0.026890297881303118, - "acc_norm": 0.523121387283237, - "acc_norm_stderr": 0.026890297881303118 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.5153374233128835, - "acc_stderr": 0.039265223787088424, - "acc_norm": 0.5153374233128835, - "acc_norm_stderr": 0.039265223787088424 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.027607914087400473, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.027607914087400473 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5129533678756477, - "acc_stderr": 0.0360722806104775, - "acc_norm": 0.5129533678756477, - "acc_norm_stderr": 0.0360722806104775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04434600701584925, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04434600701584925 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4917431192660551, - "acc_stderr": 0.021434399918214338, - "acc_norm": 0.4917431192660551, - "acc_norm_stderr": 0.021434399918214338 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04285714285714281, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04285714285714281 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45751633986928103, - "acc_stderr": 0.028526383452142628, - "acc_norm": 0.45751633986928103, - "acc_norm_stderr": 0.028526383452142628 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6942148760330579, - "acc_stderr": 0.04205953933884124, - "acc_norm": 0.6942148760330579, - "acc_norm_stderr": 0.04205953933884124 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490435, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490435 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.01980828131744984, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.01980828131744984 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35815602836879434, - "acc_stderr": 0.028602085862759412, - "acc_norm": 0.35815602836879434, - "acc_norm_stderr": 0.028602085862759412 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4017857142857143, - "acc_stderr": 0.04653333146973646, - "acc_norm": 0.4017857142857143, - "acc_norm_stderr": 0.04653333146973646 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.03381200005643525, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.03381200005643525 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.34413407821229053, - "acc_stderr": 0.015889221313307094, - "acc_norm": 0.34413407821229053, - "acc_norm_stderr": 0.015889221313307094 - }, - "harness|ko_mmlu_college_computer_science|5": { - 
"acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.58, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.58, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.029935342707877743, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.029935342707877743 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.563265306122449, - "acc_stderr": 0.03175195237583323, - "acc_norm": 0.563265306122449, - "acc_norm_stderr": 0.03175195237583323 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32790091264667537, - "acc_stderr": 0.011989936640666535, - "acc_norm": 0.32790091264667537, - "acc_norm_stderr": 0.011989936640666535 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03888176921674099, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03888176921674099 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502342, - "mc2": 0.4613168911756529, - "mc2_stderr": 0.015417066073991514 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.24295774647887325, - "acc_stderr": 0.01470146638508064, - "acc_norm": 0.29694835680751175, - "acc_norm_stderr": 0.015662796197363146 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - 
"harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mistralai/Mistral-7B-v0.1", - "model_sha": "5e9c98b96d071dce59368012254c55b0ec6f8658", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json b/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json deleted file mode 100644 index f1b9d7f3b1496469974b97d71350eaa6ef1c4f44..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3046075085324232, - "acc_stderr": 0.013449522109932487, - "acc_norm": 0.3438566552901024, - "acc_norm_stderr": 0.013880644570156222 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36875124477195775, - "acc_stderr": 0.0048148030984368154, - "acc_norm": 0.4697271459868552, - "acc_norm_stderr": 0.00498062728714758 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.52046783625731, - "acc_stderr": 0.0383161053282193, - "acc_norm": 0.52046783625731, - "acc_norm_stderr": 0.0383161053282193 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4648786717752235, - "acc_stderr": 0.01783579880629064, - "acc_norm": 0.4648786717752235, - "acc_norm_stderr": 0.01783579880629064 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.42127659574468085, - "acc_stderr": 0.03227834510146267, - "acc_norm": 0.42127659574468085, - "acc_norm_stderr": 0.03227834510146267 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.03828401115079021, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.03828401115079021 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4340836012861736, - "acc_stderr": 0.0281502322445356, - "acc_norm": 0.4340836012861736, - "acc_norm_stderr": 0.0281502322445356 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.43946188340807174, - 
"acc_stderr": 0.03331092511038179, - "acc_norm": 0.43946188340807174, - "acc_norm_stderr": 0.03331092511038179 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.4580152671755725, - "acc_stderr": 0.04369802690578757, - "acc_norm": 0.4580152671755725, - "acc_norm_stderr": 0.04369802690578757 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5151515151515151, - "acc_stderr": 0.0356071651653106, - "acc_norm": 0.5151515151515151, - "acc_norm_stderr": 0.0356071651653106 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.5103448275862069, - "acc_stderr": 0.04165774775728763, - "acc_norm": 0.5103448275862069, - "acc_norm_stderr": 0.04165774775728763 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.03793281185307809, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.03793281185307809 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4831932773109244, - "acc_stderr": 0.03246013680375308, - "acc_norm": 0.4831932773109244, - "acc_norm_stderr": 0.03246013680375308 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4307692307692308, - "acc_stderr": 0.025106820660539743, - "acc_norm": 0.4307692307692308, - "acc_norm_stderr": 0.025106820660539743 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.56, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.56, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.48148148148148145, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.48148148148148145, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.43842364532019706, - "acc_stderr": 0.03491207857486518, - "acc_norm": 0.43842364532019706, - "acc_norm_stderr": 0.03491207857486518 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.45483870967741935, - "acc_stderr": 0.028327743091561053, - "acc_norm": 0.45483870967741935, - "acc_norm_stderr": 0.028327743091561053 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7094017094017094, - "acc_stderr": 0.029745048572674078, - "acc_norm": 0.7094017094017094, - "acc_norm_stderr": 0.029745048572674078 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4075471698113208, - "acc_stderr": 0.0302422338008545, - "acc_norm": 0.4075471698113208, - "acc_norm_stderr": 0.0302422338008545 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5272727272727272, - "acc_stderr": 0.04782001791380061, - "acc_norm": 0.5272727272727272, - "acc_norm_stderr": 0.04782001791380061 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.029045600290616258, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.029045600290616258 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119995, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119995 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6019900497512438, - "acc_stderr": 0.034611994290400135, - "acc_norm": 0.6019900497512438, - "acc_norm_stderr": 0.034611994290400135 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 
0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.335978835978836, - "acc_stderr": 0.024326310529149138, - "acc_norm": 0.335978835978836, - "acc_norm_stderr": 0.024326310529149138 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.037738099906869334, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.037738099906869334 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.026917296179149116, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.026917296179149116 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4601226993865031, - "acc_stderr": 0.03915857291436971, - "acc_norm": 0.4601226993865031, - "acc_norm_stderr": 0.03915857291436971 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.43209876543209874, - "acc_stderr": 0.027563010971606676, - "acc_norm": 0.43209876543209874, - "acc_norm_stderr": 0.027563010971606676 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.47150259067357514, - "acc_stderr": 0.036025735712884414, - "acc_norm": 0.47150259067357514, - "acc_norm_stderr": 0.036025735712884414 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.39473684210526316, - "acc_stderr": 0.04598188057816542, - "acc_norm": 0.39473684210526316, - "acc_norm_stderr": 0.04598188057816542 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.45504587155963305, - "acc_stderr": 0.021350503090925167, - "acc_norm": 0.45504587155963305, - "acc_norm_stderr": 0.021350503090925167 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.041349130183033156, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.041349130183033156 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.5, - "acc_stderr": 0.028629916715693413, - "acc_norm": 0.5, - "acc_norm_stderr": 0.028629916715693413 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6446280991735537, - "acc_stderr": 0.0436923632657398, - "acc_norm": 0.6446280991735537, - "acc_norm_stderr": 0.0436923632657398 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34868421052631576, - "acc_stderr": 0.038781398887976104, - "acc_norm": 0.34868421052631576, - "acc_norm_stderr": 0.038781398887976104 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3790849673202614, - "acc_stderr": 0.01962744474841223, - "acc_norm": 0.3790849673202614, - "acc_norm_stderr": 0.01962744474841223 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.028663820147199492, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.028663820147199492 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.38392857142857145, - "acc_stderr": 0.04616143075028547, - "acc_norm": 0.38392857142857145, - 
"acc_norm_stderr": 0.04616143075028547 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.014333522059217892, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.014333522059217892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.62, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.62, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3382352941176471, - "acc_stderr": 0.028739328513983583, - "acc_norm": 0.3382352941176471, - "acc_norm_stderr": 0.028739328513983583 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.46938775510204084, - "acc_stderr": 0.031949171367580624, - "acc_norm": 0.46938775510204084, - "acc_norm_stderr": 0.031949171367580624 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5063291139240507, - "acc_stderr": 0.0325446201076786, - "acc_norm": 0.5063291139240507, - "acc_norm_stderr": 0.0325446201076786 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2985658409387223, - "acc_stderr": 0.011688060141794231, - "acc_norm": 0.2985658409387223, - "acc_norm_stderr": 0.011688060141794231 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.03460228327239171, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.03460228327239171 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.038881769216741, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.038881769216741 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3108935128518972, - "mc1_stderr": 0.016203316673559693, - "mc2": 0.48747691141114763, - "mc2_stderr": 0.015615664106933899 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31572769953051644, - "acc_stderr": 0.01593331134555563, - "acc_norm": 0.3532863849765258, - "acc_norm_stderr": 0.016385310378526204 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - 
"harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-alpaca-1k", - "model_sha": "97a2cb89d4f19712842c4e29c44e1b7821905fac", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json b/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json deleted file mode 100644 index f5c2cfe1b40cba5c81910e2a87137cb3618d7ff4..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30802047781569963, - "acc_stderr": 0.01349142951729204, - "acc_norm": 0.3515358361774744, - "acc_norm_stderr": 0.013952413699600938 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3610834495120494, - "acc_stderr": 0.004793330525656211, - "acc_norm": 0.45120493925512845, - "acc_norm_stderr": 0.004965963647210315 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2573099415204678, - "acc_stderr": 0.03352799844161865, - "acc_norm": 0.2573099415204678, - "acc_norm_stderr": 0.03352799844161865 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4077669902912621, - "acc_stderr": 0.048657775704107696, - "acc_norm": 0.4077669902912621, - "acc_norm_stderr": 0.048657775704107696 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2503192848020434, - "acc_stderr": 0.01549108895149458, - "acc_norm": 0.2503192848020434, - "acc_norm_stderr": 0.01549108895149458 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.026355158413349424, - "acc_norm": 
0.20425531914893616, - "acc_norm_stderr": 0.026355158413349424 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.031069390260789396, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.031069390260789396 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.1210762331838565, - "acc_stderr": 0.021894174113185737, - "acc_norm": 0.1210762331838565, - "acc_norm_stderr": 0.021894174113185737 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3053435114503817, - "acc_stderr": 0.04039314978724561, - "acc_norm": 0.3053435114503817, - "acc_norm_stderr": 0.04039314978724561 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3686868686868687, - "acc_stderr": 0.034373055019806184, - "acc_norm": 0.3686868686868687, - "acc_norm_stderr": 0.034373055019806184 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.036001056927277716, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.036001056927277716 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.04878608714466996, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.04878608714466996 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36666666666666664, - "acc_stderr": 0.024433016466052452, - "acc_norm": 0.36666666666666664, - "acc_norm_stderr": 0.024433016466052452 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252626, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252626 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2094017094017094, - "acc_stderr": 0.026655699653922737, - "acc_norm": 0.2094017094017094, - "acc_norm_stderr": 0.026655699653922737 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.02825420034443866, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.02825420034443866 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, 
- "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2736318407960199, - "acc_stderr": 0.03152439186555401, - "acc_norm": 0.2736318407960199, - "acc_norm_stderr": 0.03152439186555401 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.34104046242774566, - "acc_stderr": 0.036146654241808254, - "acc_norm": 0.34104046242774566, - "acc_norm_stderr": 0.036146654241808254 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.02278967314577656, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.02278967314577656 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.22832369942196531, - "acc_stderr": 0.022598703804321624, - "acc_norm": 0.22832369942196531, - "acc_norm_stderr": 0.022598703804321624 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2345679012345679, - "acc_stderr": 0.02357688174400572, - "acc_norm": 0.2345679012345679, - "acc_norm_stderr": 0.02357688174400572 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.37305699481865284, - "acc_stderr": 0.03490205592048574, - "acc_norm": 0.37305699481865284, - "acc_norm_stderr": 0.03490205592048574 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3559633027522936, - "acc_stderr": 0.020528559278244214, - "acc_norm": 0.3559633027522936, - "acc_norm_stderr": 0.020528559278244214 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.026090162504279053, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.026090162504279053 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.14049586776859505, - "acc_stderr": 0.031722334260021585, - "acc_norm": 0.14049586776859505, - "acc_norm_stderr": 0.031722334260021585 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316091, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316091 - }, - 
"harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.016774672365468517, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.016774672365468517 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.02564555362226673, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.02564555362226673 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16964285714285715, - "acc_stderr": 0.0356236785009539, - "acc_norm": 0.16964285714285715, - "acc_norm_stderr": 0.0356236785009539 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20253164556962025, - "acc_stderr": 0.026160568246601464, - "acc_norm": 0.20253164556962025, - "acc_norm_stderr": 0.026160568246601464 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24511082138200782, - "acc_stderr": 0.010986307870045509, - "acc_norm": 0.24511082138200782, - "acc_norm_stderr": 0.010986307870045509 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2962056303549572, - "mc1_stderr": 0.015983595101811392, - "mc2": 0.4616568963266555, - "mc2_stderr": 0.01577378737316958 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3380281690140845, - "acc_stderr": 0.016215540194273195, - "acc_norm": 0.45539906103286387, - "acc_norm_stderr": 0.01707145266733428 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-combine-1k", - "model_sha": "0f7abf5c07a7f3add4c89c9e3525f29ab89be562", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-cot-1k/result_2023-10-22 04:57:06.json b/mncai/Mistral-7B-v0.1-cot-1k/result_2023-10-22 04:57:06.json deleted file mode 100644 index 1650b2b0c37534c8294ed99b44bbe5f102b8d9ea..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-cot-1k/result_2023-10-22 04:57:06.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.23890784982935154, - "acc_stderr": 0.012461071376316617, - "acc_norm": 0.3003412969283277, - "acc_norm_stderr": 0.01339590930995701 - }, - "harness|ko_hellaswag|10": { - "acc": 0.29187412865962953, - "acc_stderr": 0.004536955796510544, - "acc_norm": 0.3668591913961362, - "acc_norm_stderr": 0.004809626723626839 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.391812865497076, - "acc_stderr": 0.037439798259263996, - "acc_norm": 0.391812865497076, - "acc_norm_stderr": 0.037439798259263996 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4174757281553398, - "acc_stderr": 0.048828405482122375, - "acc_norm": 0.4174757281553398, - "acc_norm_stderr": 0.048828405482122375 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.39846743295019155, - "acc_stderr": 0.017507438602777405, - "acc_norm": 0.39846743295019155, - 
"acc_norm_stderr": 0.017507438602777405 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977112, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977112 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.030783736757745664, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.030783736757745664 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.037400593820293204, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.037400593820293204 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3954983922829582, - "acc_stderr": 0.027770918531427834, - "acc_norm": 0.3954983922829582, - "acc_norm_stderr": 0.027770918531427834 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3004484304932735, - "acc_stderr": 0.030769352008229136, - "acc_norm": 0.3004484304932735, - "acc_norm_stderr": 0.030769352008229136 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.04225875451969637, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.04225875451969637 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35858585858585856, - "acc_stderr": 0.03416903640391521, - "acc_norm": 0.35858585858585856, - "acc_norm_stderr": 0.03416903640391521 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.43448275862068964, - "acc_stderr": 0.041307408795554966, - "acc_norm": 0.43448275862068964, - "acc_norm_stderr": 0.041307408795554966 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.12745098039215685, - "acc_stderr": 0.033182249219420756, - "acc_norm": 0.12745098039215685, - "acc_norm_stderr": 0.033182249219420756 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.03156663099215415, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.03156663099215415 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.38974358974358975, - "acc_stderr": 0.024726967886647074, - "acc_norm": 0.38974358974358975, - "acc_norm_stderr": 0.024726967886647074 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.04803752235190193, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.04803752235190193 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970186, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970186 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.36774193548387096, - "acc_stderr": 0.027430866579973467, - "acc_norm": 0.36774193548387096, - "acc_norm_stderr": 0.027430866579973467 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4700854700854701, - "acc_stderr": 0.032697411068124425, - "acc_norm": 0.4700854700854701, - "acc_norm_stderr": 0.032697411068124425 - }, - 
"harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3584905660377358, - "acc_stderr": 0.029514703583981755, - "acc_norm": 0.3584905660377358, - "acc_norm_stderr": 0.029514703583981755 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4090909090909091, - "acc_stderr": 0.04709306978661896, - "acc_norm": 0.4090909090909091, - "acc_norm_stderr": 0.04709306978661896 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.02719593480408562, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.02719593480408562 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763743, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763743 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5373134328358209, - "acc_stderr": 0.03525675167467973, - "acc_norm": 0.5373134328358209, - "acc_norm_stderr": 0.03525675167467973 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.4046242774566474, - "acc_stderr": 0.0374246119388725, - "acc_norm": 0.4046242774566474, - "acc_norm_stderr": 0.0374246119388725 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36243386243386244, - "acc_stderr": 0.024757473902752045, - "acc_norm": 0.36243386243386244, - "acc_norm_stderr": 0.024757473902752045 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.03852084696008534, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.03852084696008534 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.52, - "acc_stderr": 0.05021167315686781, - "acc_norm": 0.52, - "acc_norm_stderr": 0.05021167315686781 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3872832369942196, - "acc_stderr": 0.026226158605124658, - "acc_norm": 0.3872832369942196, - "acc_norm_stderr": 0.026226158605124658 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3558282208588957, - "acc_stderr": 0.03761521380046734, - "acc_norm": 0.3558282208588957, - "acc_norm_stderr": 0.03761521380046734 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.027163686038271247, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.027163686038271247 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.38341968911917096, - "acc_stderr": 0.03508984236295341, - "acc_norm": 0.38341968911917096, - "acc_norm_stderr": 0.03508984236295341 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.41284403669724773, - "acc_stderr": 0.021109128133413906, - "acc_norm": 0.41284403669724773, - "acc_norm_stderr": 0.021109128133413906 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.31746031746031744, - "acc_stderr": 0.04163453031302859, - "acc_norm": 0.31746031746031744, - "acc_norm_stderr": 0.04163453031302859 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.027826109307283693, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.027826109307283693 - }, - 
"harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.32894736842105265, - "acc_stderr": 0.03823428969926606, - "acc_norm": 0.32894736842105265, - "acc_norm_stderr": 0.03823428969926606 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2875816993464052, - "acc_stderr": 0.018311653053648222, - "acc_norm": 0.2875816993464052, - "acc_norm_stderr": 0.018311653053648222 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.30141843971631205, - "acc_stderr": 0.02737412888263115, - "acc_norm": 0.30141843971631205, - "acc_norm_stderr": 0.02737412888263115 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.26785714285714285, - "acc_stderr": 0.042032772914677614, - "acc_norm": 0.26785714285714285, - "acc_norm_stderr": 0.042032772914677614 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03179876342176851, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03179876342176851 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.25139664804469275, - "acc_stderr": 0.014508979453553991, - "acc_norm": 0.25139664804469275, - "acc_norm_stderr": 0.014508979453553991 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621503, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621503 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.40441176470588236, - "acc_stderr": 0.02981263070156974, - "acc_norm": 0.40441176470588236, - "acc_norm_stderr": 0.02981263070156974 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.363265306122449, - "acc_stderr": 0.030789051139030806, - "acc_norm": 0.363265306122449, - "acc_norm_stderr": 0.030789051139030806 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4008438818565401, - "acc_stderr": 0.031900803894732356, - "acc_norm": 0.4008438818565401, - "acc_norm_stderr": 0.031900803894732356 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28683181225554105, - "acc_stderr": 0.011551504781176935, - "acc_norm": 0.28683181225554105, - "acc_norm_stderr": 0.011551504781176935 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.03343311240488419, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.03343311240488419 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.34545454545454546, - "acc_stderr": 0.03713158067481913, - "acc_norm": 0.34545454545454546, - "acc_norm_stderr": 0.03713158067481913 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25703794369645044, - "mc1_stderr": 0.015298077509485083, - "mc2": 0.4724926099446397, - "mc2_stderr": 0.01706017576010212 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.08568075117370892, - "acc_stderr": 0.009594575928755986, - "acc_norm": 0.2112676056338028, - "acc_norm_stderr": 0.013993183015633253 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - 
"harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-cot-1k", - "model_sha": "1be8b893be7dc771e4ac3e9598ef83f3d403fdde", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-cot-2k/result_2023-10-22 06:01:22.json b/mncai/Mistral-7B-v0.1-cot-2k/result_2023-10-22 06:01:22.json deleted file mode 100644 index 982c6ec09005852ec96765a2f4f34a2eabda9f25..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-cot-2k/result_2023-10-22 06:01:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.23293515358361774, - "acc_stderr": 0.01235250704261739, - "acc_norm": 0.29180887372013653, - "acc_norm_stderr": 0.013284525292403513 - }, - "harness|ko_hellaswag|10": { - "acc": 0.28888667596096396, - "acc_stderr": 0.0045231884311428975, - "acc_norm": 0.359788886675961, - "acc_norm_stderr": 0.004789575163418653 - }, - 
"harness|ko_mmlu_world_religions|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.036996580176568775, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.036996580176568775 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3883495145631068, - "acc_stderr": 0.0482572933735639, - "acc_norm": 0.3883495145631068, - "acc_norm_stderr": 0.0482572933735639 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.36270753512132825, - "acc_stderr": 0.0171927086746023, - "acc_norm": 0.36270753512132825, - "acc_norm_stderr": 0.0171927086746023 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3276595744680851, - "acc_stderr": 0.030683020843230997, - "acc_norm": 0.3276595744680851, - "acc_norm_stderr": 0.030683020843230997 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3313253012048193, - "acc_stderr": 0.03664314777288088, - "acc_norm": 0.3313253012048193, - "acc_norm_stderr": 0.03664314777288088 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2861736334405145, - "acc_stderr": 0.02567025924218893, - "acc_norm": 0.2861736334405145, - "acc_norm_stderr": 0.02567025924218893 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.36771300448430494, - "acc_stderr": 0.03236198350928276, - "acc_norm": 0.36771300448430494, - "acc_norm_stderr": 0.03236198350928276 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3511450381679389, - "acc_stderr": 0.04186445163013751, - "acc_norm": 0.3511450381679389, - "acc_norm_stderr": 0.04186445163013751 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03358618145732523, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03358618145732523 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.039215453124671215, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.039215453124671215 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416545, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 0.03708284662416545 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.41596638655462187, - "acc_stderr": 0.03201650100739614, - "acc_norm": 0.41596638655462187, - "acc_norm_stderr": 0.03201650100739614 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3282051282051282, - "acc_stderr": 0.023807633198657266, - "acc_norm": 0.3282051282051282, - "acc_norm_stderr": 0.023807633198657266 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.04616631111801715, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.04616631111801715 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 
0.031270907132976984, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.031270907132976984 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3935483870967742, - "acc_stderr": 0.027791878753132274, - "acc_norm": 0.3935483870967742, - "acc_norm_stderr": 0.027791878753132274 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5213675213675214, - "acc_stderr": 0.032726164476349545, - "acc_norm": 0.5213675213675214, - "acc_norm_stderr": 0.032726164476349545 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.33584905660377357, - "acc_stderr": 0.02906722014664483, - "acc_norm": 0.33584905660377357, - "acc_norm_stderr": 0.02906722014664483 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.42727272727272725, - "acc_stderr": 0.04738198703545483, - "acc_norm": 0.42727272727272725, - "acc_norm_stderr": 0.04738198703545483 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389023, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389023 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.472636815920398, - "acc_stderr": 0.035302355173346824, - "acc_norm": 0.472636815920398, - "acc_norm_stderr": 0.035302355173346824 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.034765996075164785, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.034765996075164785 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.02241804289111394, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.02241804289111394 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3208092485549133, - "acc_stderr": 0.025131000233647907, - "acc_norm": 0.3208092485549133, - "acc_norm_stderr": 0.025131000233647907 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.025702640260603746, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.025702640260603746 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40932642487046633, - "acc_stderr": 0.03548608168860806, - "acc_norm": 0.40932642487046633, - "acc_norm_stderr": 0.03548608168860806 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3908256880733945, - "acc_stderr": 
0.020920058346111076, - "acc_norm": 0.3908256880733945, - "acc_norm_stderr": 0.020920058346111076 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871137, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871137 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.02699254433929723, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.02699254433929723 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4297520661157025, - "acc_stderr": 0.04519082021319772, - "acc_norm": 0.4297520661157025, - "acc_norm_stderr": 0.04519082021319772 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.034597776068105365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.034597776068105365 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.018635594034423972, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.018635594034423972 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.28368794326241137, - "acc_stderr": 0.026891709428343957, - "acc_norm": 0.28368794326241137, - "acc_norm_stderr": 0.026891709428343957 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.36607142857142855, - "acc_stderr": 0.0457237235873743, - "acc_norm": 0.36607142857142855, - "acc_norm_stderr": 0.0457237235873743 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.031674687068289804, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.031674687068289804 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095268, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095268 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33455882352941174, - "acc_stderr": 0.028661996202335307, - "acc_norm": 0.33455882352941174, - "acc_norm_stderr": 0.028661996202335307 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3346938775510204, - "acc_stderr": 0.030209235226242307, - "acc_norm": 0.3346938775510204, - "acc_norm_stderr": 0.030209235226242307 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4177215189873418, - "acc_stderr": 0.032103530322412685, - "acc_norm": 0.4177215189873418, - "acc_norm_stderr": 0.032103530322412685 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2653194263363755, - "acc_stderr": 0.011276198843958867, - "acc_norm": 0.2653194263363755, - "acc_norm_stderr": 0.011276198843958867 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35784313725490197, - "acc_stderr": 0.033644872860882996, - "acc_norm": 0.35784313725490197, - "acc_norm_stderr": 0.033644872860882996 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3212121212121212, - "acc_stderr": 0.03646204963253812, - "acc_norm": 0.3212121212121212, - "acc_norm_stderr": 0.03646204963253812 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - 
"mc1_stderr": 0.014948812679062135, - "mc2": 0.4322374413935736, - "mc2_stderr": 0.016738398140754854 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.08450704225352113, - "acc_stderr": 0.009534747022295878, - "acc_norm": 0.15492957746478872, - "acc_norm_stderr": 0.012403631035187773 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-cot-2k", - "model_sha": "6d58e5dc17884d38f2beeb8ab0d824d24d798530", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json b/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json deleted file mode 100644 index 
1c6bd8f424c1d573be9455c9fc87338985763507..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.31313993174061433, - "acc_stderr": 0.013552671543623494, - "acc_norm": 0.3575085324232082, - "acc_norm_stderr": 0.014005494275916573 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37064329814777935, - "acc_stderr": 0.004819899945342492, - "acc_norm": 0.4643497311292571, - "acc_norm_stderr": 0.004977081808179427 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.39766081871345027, - "acc_stderr": 0.0375363895576169, - "acc_norm": 0.39766081871345027, - "acc_norm_stderr": 0.0375363895576169 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.04721188506097173, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.04721188506097173 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.41507024265644954, - "acc_stderr": 0.01762013700365527, - "acc_norm": 0.41507024265644954, - "acc_norm_stderr": 0.01762013700365527 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.040247784019771096, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.040247784019771096 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.030783736757745643, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.030783736757745643 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3072289156626506, - "acc_stderr": 0.03591566797824664, - "acc_norm": 0.3072289156626506, - "acc_norm_stderr": 0.03591566797824664 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.36012861736334406, - "acc_stderr": 0.027264297599804012, - "acc_norm": 0.36012861736334406, - "acc_norm_stderr": 0.027264297599804012 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4125560538116592, - "acc_stderr": 0.03304062175449296, - "acc_norm": 0.4125560538116592, - "acc_norm_stderr": 0.03304062175449296 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3511450381679389, - "acc_stderr": 0.04186445163013751, - "acc_norm": 0.3511450381679389, - "acc_norm_stderr": 0.04186445163013751 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.033832012232444426, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.033832012232444426 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.03878352372138622, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.03878352372138622 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364396, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364396 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121633, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121633 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3128205128205128, - "acc_stderr": 0.02350757902064535, - "acc_norm": 0.3128205128205128, - "acc_norm_stderr": 0.02350757902064535 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.047500773411999854, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.047500773411999854 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.033661244890514495, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.033661244890514495 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3, - "acc_stderr": 0.02606936229533513, - "acc_norm": 0.3, - "acc_norm_stderr": 0.02606936229533513 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6196581196581197, - "acc_stderr": 0.031804252043840985, - "acc_norm": 0.6196581196581197, - "acc_norm_stderr": 0.031804252043840985 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30566037735849055, - "acc_stderr": 0.028353298073322663, - "acc_norm": 0.30566037735849055, - "acc_norm_stderr": 0.028353298073322663 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.37272727272727274, - "acc_stderr": 0.04631381319425464, - "acc_norm": 0.37272727272727274, - "acc_norm_stderr": 0.04631381319425464 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2851851851851852, - "acc_stderr": 0.027528599210340496, - "acc_norm": 0.2851851851851852, - "acc_norm_stderr": 0.027528599210340496 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.038227469376587525, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.038227469376587525 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.373134328358209, - "acc_stderr": 0.034198326081760065, - "acc_norm": 0.373134328358209, - "acc_norm_stderr": 0.034198326081760065 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.03414014007044036, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.03414014007044036 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3148148148148148, - "acc_stderr": 0.02391998416404774, - "acc_norm": 0.3148148148148148, - "acc_norm_stderr": 0.02391998416404774 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2986111111111111, - "acc_stderr": 0.03827052357950756, - "acc_norm": 0.2986111111111111, - "acc_norm_stderr": 0.03827052357950756 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.025906632631016127, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.025906632631016127 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3312883435582822, - "acc_stderr": 0.03697983910025588, - "acc_norm": 0.3312883435582822, - "acc_norm_stderr": 0.03697983910025588 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3765432098765432, - "acc_stderr": 0.02695934451874778, - "acc_norm": 0.3765432098765432, - "acc_norm_stderr": 0.02695934451874778 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - 
"acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089116, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089116 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.04303684033537315, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.04303684033537315 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3321100917431193, - "acc_stderr": 0.020192682985423337, - "acc_norm": 0.3321100917431193, - "acc_norm_stderr": 0.020192682985423337 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.18253968253968253, - "acc_stderr": 0.034550710191021475, - "acc_norm": 0.18253968253968253, - "acc_norm_stderr": 0.034550710191021475 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.33986928104575165, - "acc_stderr": 0.027121956071388852, - "acc_norm": 0.33986928104575165, - "acc_norm_stderr": 0.027121956071388852 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319771, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319771 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, - "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.01869085027359529, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.01869085027359529 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.02718712701150381, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.02718712701150381 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04547960999764376, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04547960999764376 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25, - "acc_stderr": 0.029531221160930918, - "acc_norm": 0.25, - "acc_norm_stderr": 0.029531221160930918 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26145251396648045, - "acc_stderr": 0.014696599650364548, - "acc_norm": 0.26145251396648045, - "acc_norm_stderr": 0.014696599650364548 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.026799562024887674, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.026799562024887674 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27346938775510204, - "acc_stderr": 0.028535560337128438, - "acc_norm": 0.27346938775510204, - "acc_norm_stderr": 0.028535560337128438 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.34177215189873417, - "acc_stderr": 0.030874537537553617, - "acc_norm": 0.34177215189873417, - "acc_norm_stderr": 0.030874537537553617 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26792698826597133, - "acc_stderr": 0.011311347690633885, - "acc_norm": 0.26792698826597133, - 
"acc_norm_stderr": 0.011311347690633885 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.03182231867647553, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.03182231867647553 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.03663974994391244, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.03663974994391244 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2717258261933905, - "mc1_stderr": 0.015572840452875835, - "mc2": 0.4450037389871468, - "mc2_stderr": 0.01574377596952645 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.20657276995305165, - "acc_stderr": 0.013877949635848683, - "acc_norm": 0.3086854460093897, - "acc_norm_stderr": 0.015835476890718972 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": 
"mncai/Mistral-7B-v0.1-orca-1k", - "model_sha": "3bfedee0d952da852fefa84e70f6373174a1deaf", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json b/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json deleted file mode 100644 index f4b5ea0ec726c8a2674faacea3c4f1425f40243b..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3174061433447099, - "acc_stderr": 0.01360223908803817, - "acc_norm": 0.35580204778157, - "acc_norm_stderr": 0.013990571137918763 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37293367855008963, - "acc_stderr": 0.004825963768772218, - "acc_norm": 0.4615614419438359, - "acc_norm_stderr": 0.004975014529648631 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4327485380116959, - "acc_stderr": 0.03799978644370607, - "acc_norm": 0.4327485380116959, - "acc_norm_stderr": 0.03799978644370607 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.4563106796116505, - "acc_stderr": 0.04931801994220414, - "acc_norm": 0.4563106796116505, - "acc_norm_stderr": 0.04931801994220414 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4227330779054917, - "acc_stderr": 0.017665180351954062, - "acc_norm": 0.4227330779054917, - "acc_norm_stderr": 0.017665180351954062 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3404255319148936, - "acc_stderr": 0.03097669299853443, - "acc_norm": 0.3404255319148936, - "acc_norm_stderr": 0.03097669299853443 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.40963855421686746, - "acc_stderr": 0.038284011150790206, - "acc_norm": 0.40963855421686746, - "acc_norm_stderr": 0.038284011150790206 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.42443729903536975, - "acc_stderr": 0.028071928247946205, - "acc_norm": 0.42443729903536975, - "acc_norm_stderr": 0.028071928247946205 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.40358744394618834, - "acc_stderr": 0.03292802819330315, - "acc_norm": 0.40358744394618834, - "acc_norm_stderr": 0.03292802819330315 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3969465648854962, - "acc_stderr": 0.04291135671009225, - "acc_norm": 0.3969465648854962, - "acc_norm_stderr": 0.04291135671009225 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939098, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939098 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5050505050505051, - "acc_stderr": 0.035621707606254015, - "acc_norm": 0.5050505050505051, - "acc_norm_stderr": 0.035621707606254015 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.36551724137931035, - "acc_stderr": 0.04013124195424386, - "acc_norm": 0.36551724137931035, - "acc_norm_stderr": 0.04013124195424386 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364395, - "acc_norm": 0.24509803921568626, - 
"acc_norm_stderr": 0.04280105837364395 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.39915966386554624, - "acc_stderr": 0.03181110032413926, - "acc_norm": 0.39915966386554624, - "acc_norm_stderr": 0.03181110032413926 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3974358974358974, - "acc_stderr": 0.024811920017903836, - "acc_norm": 0.3974358974358974, - "acc_norm_stderr": 0.024811920017903836 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985905, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985905 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3399014778325123, - "acc_stderr": 0.033327690684107895, - "acc_norm": 0.3399014778325123, - "acc_norm_stderr": 0.033327690684107895 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4032258064516129, - "acc_stderr": 0.02790615082604114, - "acc_norm": 0.4032258064516129, - "acc_norm_stderr": 0.02790615082604114 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6452991452991453, - "acc_stderr": 0.03134250486245402, - "acc_norm": 0.6452991452991453, - "acc_norm_stderr": 0.03134250486245402 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4188679245283019, - "acc_stderr": 0.030365050829115208, - "acc_norm": 0.4188679245283019, - "acc_norm_stderr": 0.030365050829115208 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4, - "acc_stderr": 0.0469237132203465, - "acc_norm": 0.4, - "acc_norm_stderr": 0.0469237132203465 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.027940457136228412, - "acc_norm": 0.3, - "acc_norm_stderr": 0.027940457136228412 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5074626865671642, - "acc_stderr": 0.03535140084276719, - "acc_norm": 0.5074626865671642, - "acc_norm_stderr": 0.03535140084276719 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.03550683989165581, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.03550683989165581 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.024677862841332783, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.024677862841332783 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3819444444444444, - "acc_stderr": 0.040629907841466674, - "acc_norm": 0.3819444444444444, - "acc_norm_stderr": 0.040629907841466674 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.56, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.56, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3959537572254335, - "acc_stderr": 0.02632981334194626, - "acc_norm": 0.3959537572254335, - "acc_norm_stderr": 0.02632981334194626 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 
0.4294478527607362, - "acc_stderr": 0.038890666191127216, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.038890666191127216 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.02657148348071997, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.02657148348071997 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.43523316062176165, - "acc_stderr": 0.03578038165008586, - "acc_norm": 0.43523316062176165, - "acc_norm_stderr": 0.03578038165008586 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.04339138322579859, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.04339138322579859 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.45871559633027525, - "acc_stderr": 0.02136412253388169, - "acc_norm": 0.45871559633027525, - "acc_norm_stderr": 0.02136412253388169 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.042163702135578345, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.042163702135578345 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.027956046165424516, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.027956046165424516 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5702479338842975, - "acc_stderr": 0.04519082021319772, - "acc_norm": 0.5702479338842975, - "acc_norm_stderr": 0.04519082021319772 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.038607315993160904, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.038607315993160904 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3300653594771242, - "acc_stderr": 0.019023726160724553, - "acc_norm": 0.3300653594771242, - "acc_norm_stderr": 0.019023726160724553 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.36524822695035464, - "acc_stderr": 0.028723863853281274, - "acc_norm": 0.36524822695035464, - "acc_norm_stderr": 0.028723863853281274 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4166666666666667, - "acc_stderr": 0.03362277436608043, - "acc_norm": 0.4166666666666667, - "acc_norm_stderr": 0.03362277436608043 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2748603351955307, - "acc_stderr": 0.014931316703220513, - "acc_norm": 0.2748603351955307, - "acc_norm_stderr": 0.014931316703220513 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.43014705882352944, - "acc_stderr": 0.030074971917302875, - "acc_norm": 0.43014705882352944, - "acc_norm_stderr": 0.030074971917302875 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 
0.3877551020408163, - "acc_stderr": 0.03119223072679566, - "acc_norm": 0.3877551020408163, - "acc_norm_stderr": 0.03119223072679566 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5021097046413502, - "acc_stderr": 0.032546938018020076, - "acc_norm": 0.5021097046413502, - "acc_norm_stderr": 0.032546938018020076 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.29139504563233376, - "acc_stderr": 0.011605720214257615, - "acc_norm": 0.29139504563233376, - "acc_norm_stderr": 0.011605720214257615 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.03384132045674118, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.03384132045674118 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4484848484848485, - "acc_stderr": 0.03883565977956929, - "acc_norm": 0.4484848484848485, - "acc_norm_stderr": 0.03883565977956929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3108935128518972, - "mc1_stderr": 0.016203316673559693, - "mc2": 0.49057702125408326, - "mc2_stderr": 0.015830176414087203 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.11854460093896714, - "acc_stderr": 0.011080931765790569, - "acc_norm": 0.20892018779342722, - "acc_norm_stderr": 0.01393591775496239 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, 
- "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-orca-2k", - "model_sha": "60e3c4f7563c49c1c03e6e7ea873148ebc240b8d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json b/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json deleted file mode 100644 index 0bacf82e018f198094831475bba099263cedfe54..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2977815699658703, - "acc_stderr": 0.013363080107244487, - "acc_norm": 0.3293515358361775, - "acc_norm_stderr": 0.013734057652635474 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36168094005178253, - "acc_stderr": 0.004795051037917727, - "acc_norm": 0.4523999203345947, - "acc_norm_stderr": 0.004967118575905286 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.43859649122807015, - "acc_stderr": 0.03805797505590459, - "acc_norm": 0.43859649122807015, - "acc_norm_stderr": 0.03805797505590459 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.33980582524271846, - "acc_stderr": 0.04689765937278135, - "acc_norm": 0.33980582524271846, - "acc_norm_stderr": 0.04689765937278135 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3537675606641124, - "acc_stderr": 0.017098184708161906, - "acc_norm": 0.3537675606641124, - "acc_norm_stderr": 0.017098184708161906 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2, - "acc_stderr": 0.03455473702325438, - "acc_norm": 0.2, - "acc_norm_stderr": 0.03455473702325438 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932268, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932268 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.029241883869628834, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.029241883869628834 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3614457831325301, - "acc_stderr": 0.037400593820293204, - "acc_norm": 0.3614457831325301, - "acc_norm_stderr": 0.037400593820293204 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2090032154340836, - "acc_stderr": 0.02309314039837422, - "acc_norm": 0.2090032154340836, - "acc_norm_stderr": 0.02309314039837422 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.33183856502242154, - "acc_stderr": 0.031602951437766785, - "acc_norm": 0.33183856502242154, - "acc_norm_stderr": 0.031602951437766785 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2727272727272727, - "acc_stderr": 
0.03173071239071724, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.03173071239071724 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309993, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309993 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3025210084033613, - "acc_stderr": 0.029837962388291926, - "acc_norm": 0.3025210084033613, - "acc_norm_stderr": 0.029837962388291926 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.31025641025641026, - "acc_stderr": 0.02345467488940429, - "acc_norm": 0.31025641025641026, - "acc_norm_stderr": 0.02345467488940429 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.5, - "acc_stderr": 0.050251890762960605, - "acc_norm": 0.5, - "acc_norm_stderr": 0.050251890762960605 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04330043749650742, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04330043749650742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.24630541871921183, - "acc_stderr": 0.030315099285617722, - "acc_norm": 0.24630541871921183, - "acc_norm_stderr": 0.030315099285617722 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.27419354838709675, - "acc_stderr": 0.025378139970885193, - "acc_norm": 0.27419354838709675, - "acc_norm_stderr": 0.025378139970885193 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5384615384615384, - "acc_stderr": 0.03265903381186194, - "acc_norm": 0.5384615384615384, - "acc_norm_stderr": 0.03265903381186194 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24150943396226415, - "acc_stderr": 0.026341480371118355, - "acc_norm": 0.24150943396226415, - "acc_norm_stderr": 0.026341480371118355 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.04461272175910508, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.04461272175910508 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.025348097468097845, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.025348097468097845 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - "acc_stderr": 0.0347918557259966, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.0347918557259966 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4129353233830846, - "acc_stderr": 0.03481520803367348, - "acc_norm": 0.4129353233830846, - "acc_norm_stderr": 0.03481520803367348 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2254335260115607, - "acc_stderr": 0.03186209851641144, - "acc_norm": 0.2254335260115607, - "acc_norm_stderr": 0.03186209851641144 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24338624338624337, - "acc_stderr": 0.022101128787415436, - "acc_norm": 0.24338624338624337, - "acc_norm_stderr": 0.022101128787415436 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.03773809990686934, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.03773809990686934 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 
0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615625, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615625 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2839506172839506, - "acc_stderr": 0.025089478523765134, - "acc_norm": 0.2839506172839506, - "acc_norm_stderr": 0.025089478523765134 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29533678756476683, - "acc_stderr": 0.03292296639155142, - "acc_norm": 0.29533678756476683, - "acc_norm_stderr": 0.03292296639155142 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281337, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281337 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.27155963302752295, - "acc_stderr": 0.019069098363191445, - "acc_norm": 0.27155963302752295, - "acc_norm_stderr": 0.019069098363191445 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3492063492063492, - "acc_stderr": 0.04263906892795132, - "acc_norm": 0.3492063492063492, - "acc_norm_stderr": 0.04263906892795132 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.027363593284684944, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.027363593284684944 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.43, - "acc_stderr": 0.04975698519562428, - "acc_norm": 0.43, - "acc_norm_stderr": 0.04975698519562428 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.04537935177947879, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.04537935177947879 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.01818521895431808, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.01818521895431808 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.0258921511567094, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.0258921511567094 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.04493949068613539, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.04493949068613539 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.19907407407407407, - "acc_stderr": 0.027232298462690253, - "acc_norm": 0.19907407407407407, - "acc_norm_stderr": 0.027232298462690253 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695236, - "acc_norm": 0.34, - "acc_norm_stderr": 
0.04760952285695236 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.2536764705882353, - "acc_stderr": 0.026431329870789562, - "acc_norm": 0.2536764705882353, - "acc_norm_stderr": 0.026431329870789562 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.031052391937584353, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.031052391937584353 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771314, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771314 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.03242661719827218, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.03242661719827218 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.03524390844511782, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.03524390844511782 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2998776009791922, - "mc1_stderr": 0.016040352966713616, - "mc2": 0.4583811632060745, - "mc2_stderr": 0.015833164608294075 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5140845070422535, - "acc_stderr": 0.01713297775480435, - "acc_norm": 0.6103286384976526, - "acc_norm_stderr": 0.016717307802784468 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, 
- "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-orca_platy-1k", - "model_sha": "37f66cf60e2e2b4e299419202fe22a45b8d96874", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json b/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json deleted file mode 100644 index a31da9b030062c2156d9cf1e090e2ff20b4b799a..0000000000000000000000000000000000000000 --- a/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30119453924914674, - "acc_stderr": 0.01340674176784762, - "acc_norm": 0.34044368600682595, - "acc_norm_stderr": 0.013847460518892976 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3564031069508066, - "acc_stderr": 0.004779574402771374, - "acc_norm": 0.45269866560446126, - "acc_norm_stderr": 0.004967402792744855 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824565, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824565 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2707535121328225, - "acc_stderr": 0.015889888362560486, - "acc_norm": 0.2707535121328225, - "acc_norm_stderr": 0.015889888362560486 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20425531914893616, - "acc_stderr": 0.02635515841334941, - "acc_norm": 0.20425531914893616, - "acc_norm_stderr": 0.02635515841334941 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663926, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663926 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.20179372197309417, - "acc_stderr": 0.02693611191280227, - "acc_norm": 0.20179372197309417, - "acc_norm_stderr": 
0.02693611191280227 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.03806142687309994, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.03806142687309994 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.21008403361344538, - "acc_stderr": 0.026461398717471874, - "acc_norm": 0.21008403361344538, - "acc_norm_stderr": 0.026461398717471874 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2128205128205128, - "acc_stderr": 0.020752423722128002, - "acc_norm": 0.2128205128205128, - "acc_norm_stderr": 0.020752423722128002 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25161290322580643, - "acc_stderr": 0.024685979286239956, - "acc_norm": 0.25161290322580643, - "acc_norm_stderr": 0.024685979286239956 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2606837606837607, - "acc_stderr": 0.028760348956523414, - "acc_norm": 0.2606837606837607, - "acc_norm_stderr": 0.028760348956523414 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2188679245283019, - "acc_stderr": 0.025447863825108597, - "acc_norm": 0.2188679245283019, - "acc_norm_stderr": 0.025447863825108597 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916714, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916714 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 
0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776575, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776575 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.29190751445086704, - "acc_stderr": 0.02447699407624734, - "acc_norm": 0.29190751445086704, - "acc_norm_stderr": 0.02447699407624734 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2932098765432099, - "acc_stderr": 0.025329888171900926, - "acc_norm": 0.2932098765432099, - "acc_norm_stderr": 0.025329888171900926 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22201834862385322, - "acc_stderr": 0.017818849564796617, - "acc_norm": 0.22201834862385322, - "acc_norm_stderr": 0.017818849564796617 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15079365079365079, - "acc_stderr": 0.03200686497287392, - "acc_norm": 0.15079365079365079, - "acc_norm_stderr": 0.03200686497287392 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.0248480182638752, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.0248480182638752 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.371900826446281, - "acc_stderr": 0.04412015806624503, - "acc_norm": 0.371900826446281, - "acc_norm_stderr": 0.04412015806624503 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119668, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119668 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.026469036818590634, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.026469036818590634 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - 
"harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.027920963147993662, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.027920963147993662 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.02257177102549475, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.02257177102549475 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788167, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.26582278481012656, - "acc_stderr": 0.028756799629658332, - "acc_norm": 0.26582278481012656, - "acc_norm_stderr": 0.028756799629658332 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2711864406779661, - "acc_stderr": 0.011354581451622985, - "acc_norm": 0.2711864406779661, - "acc_norm_stderr": 0.011354581451622985 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693264, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693264 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.035243908445117836, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.035243908445117836 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2607099143206854, - "mc1_stderr": 0.015368841620766379, - "mc2": 0.44338943697081723, - "mc2_stderr": 0.015766267984553387 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.19953051643192488, - "acc_stderr": 0.013699738366459508, - "acc_norm": 0.2664319248826291, - "acc_norm_stderr": 0.015154742533365831 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - 
"harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/Mistral-7B-v0.1-platy-1k", - "model_sha": "ae6790c706091e0a0ffada183edf6f08e06ba235", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json b/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json deleted file mode 100644 index 906cc38f8d5eb3b89a8d10ed1af09f6827703e22..0000000000000000000000000000000000000000 --- a/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3250853242320819, - "acc_stderr": 0.013688147309729124, - "acc_norm": 0.3609215017064846, - "acc_norm_stderr": 0.01403476138617546 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3724357697669787, - "acc_stderr": 0.00482465540607556, - "acc_norm": 0.4759012148974308, - "acc_norm_stderr": 0.0049839823961873655 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5087719298245614, - "acc_stderr": 0.038342347441649924, - "acc_norm": 0.5087719298245614, - "acc_norm_stderr": 0.038342347441649924 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4725415070242657, - "acc_stderr": 0.017852981266633955, - "acc_norm": 0.4725415070242657, - "acc_norm_stderr": 0.017852981266633955 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37777777777777777, - "acc_stderr": 0.04188307537595853, - "acc_norm": 0.37777777777777777, - "acc_norm_stderr": 0.04188307537595853 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.4425531914893617, - "acc_stderr": 0.032469569197899575, - "acc_norm": 0.4425531914893617, - 
"acc_norm_stderr": 0.032469569197899575 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.41566265060240964, - "acc_stderr": 0.03836722176598052, - "acc_norm": 0.41566265060240964, - "acc_norm_stderr": 0.03836722176598052 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.48231511254019294, - "acc_stderr": 0.02838032284907713, - "acc_norm": 0.48231511254019294, - "acc_norm_stderr": 0.02838032284907713 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755292, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755292 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5555555555555556, - "acc_stderr": 0.035402943770953675, - "acc_norm": 0.5555555555555556, - "acc_norm_stderr": 0.035402943770953675 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.47586206896551725, - "acc_stderr": 0.04161808503501528, - "acc_norm": 0.47586206896551725, - "acc_norm_stderr": 0.04161808503501528 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.5042016806722689, - "acc_stderr": 0.0324773433444811, - "acc_norm": 0.5042016806722689, - "acc_norm_stderr": 0.0324773433444811 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.46153846153846156, - "acc_stderr": 0.025275892070240627, - "acc_norm": 0.46153846153846156, - "acc_norm_stderr": 0.025275892070240627 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.034991131376767445, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.034991131376767445 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4774193548387097, - "acc_stderr": 0.028414985019707868, - "acc_norm": 0.4774193548387097, - "acc_norm_stderr": 0.028414985019707868 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.031075028526507755, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.031075028526507755 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4528301886792453, - "acc_stderr": 0.030635627957961823, - "acc_norm": 0.4528301886792453, - "acc_norm_stderr": 0.030635627957961823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.04769300568972744, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.04769300568972744 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.027634907264178544, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 
0.027634907264178544 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6069651741293532, - "acc_stderr": 0.0345368246603156, - "acc_norm": 0.6069651741293532, - "acc_norm_stderr": 0.0345368246603156 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36243386243386244, - "acc_stderr": 0.024757473902752042, - "acc_norm": 0.36243386243386244, - "acc_norm_stderr": 0.024757473902752042 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43352601156069365, - "acc_stderr": 0.026680134761679217, - "acc_norm": 0.43352601156069365, - "acc_norm_stderr": 0.026680134761679217 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4233128834355828, - "acc_stderr": 0.03881891213334383, - "acc_norm": 0.4233128834355828, - "acc_norm_stderr": 0.03881891213334383 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.02771666165019404, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.02771666165019404 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.44041450777202074, - "acc_stderr": 0.03582724530036094, - "acc_norm": 0.44041450777202074, - "acc_norm_stderr": 0.03582724530036094 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.04339138322579859, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.04339138322579859 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47522935779816516, - "acc_stderr": 0.02141099975363592, - "acc_norm": 0.47522935779816516, - "acc_norm_stderr": 0.02141099975363592 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871136, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871136 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.02855582751652879, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.02855582751652879 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4605263157894737, - "acc_stderr": 0.04056242252249033, - "acc_norm": 0.4605263157894737, - "acc_norm_stderr": 0.04056242252249033 - }, - "harness|ko_mmlu_professional_psychology|5": { 
- "acc": 0.38235294117647056, - "acc_stderr": 0.01965992249362335, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.01965992249362335 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35106382978723405, - "acc_stderr": 0.028473501272963768, - "acc_norm": 0.35106382978723405, - "acc_norm_stderr": 0.028473501272963768 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.03372343271653063, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.03372343271653063 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26927374301675977, - "acc_stderr": 0.014835616582882611, - "acc_norm": 0.26927374301675977, - "acc_norm_stderr": 0.014835616582882611 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.43014705882352944, - "acc_stderr": 0.030074971917302875, - "acc_norm": 0.43014705882352944, - "acc_norm_stderr": 0.030074971917302875 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5102040816326531, - "acc_stderr": 0.03200255347893783, - "acc_norm": 0.5102040816326531, - "acc_norm_stderr": 0.03200255347893783 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.4641350210970464, - "acc_stderr": 0.03246338898055659, - "acc_norm": 0.4641350210970464, - "acc_norm_stderr": 0.03246338898055659 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3135593220338983, - "acc_stderr": 0.011849234291459324, - "acc_norm": 0.3135593220338983, - "acc_norm_stderr": 0.011849234291459324 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.36764705882352944, - "acc_stderr": 0.03384132045674118, - "acc_norm": 0.36764705882352944, - "acc_norm_stderr": 0.03384132045674118 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.03815494308688929, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.03815494308688929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.31701346389228885, - "mc1_stderr": 0.016289203374403396, - "mc2": 0.4891689873387216, - "mc2_stderr": 0.015571905877884106 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2664319248826291, - "acc_stderr": 0.01515474253336583, - "acc_norm": 0.3532863849765258, - "acc_norm_stderr": 0.016385310378526204 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - 
"harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/mistral-7b-ko-1871-2p1", - "model_sha": "1ab1ccefadb9c3e832b4d2018cf0220974f998b3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mncai/mistral-ko-f-1871-ep1/result_2023-10-05 07:43:59.json b/mncai/mistral-ko-f-1871-ep1/result_2023-10-05 07:43:59.json deleted file mode 100644 index 2e1dd7b74aa51286a12b69796f22b27c60658109..0000000000000000000000000000000000000000 --- a/mncai/mistral-ko-f-1871-ep1/result_2023-10-05 07:43:59.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3250853242320819, - "acc_stderr": 0.013688147309729124, - "acc_norm": 0.36177474402730375, - "acc_norm_stderr": 0.01404195794503808 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37263493328022307, - "acc_stderr": 0.004825179407757562, - "acc_norm": 0.47610037841067515, - "acc_norm_stderr": 0.004984077906216099 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.5087719298245614, - "acc_stderr": 0.038342347441649924, - "acc_norm": 0.5087719298245614, - "acc_norm_stderr": 0.038342347441649924 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5436893203883495, - "acc_stderr": 0.049318019942204146, - "acc_norm": 0.5436893203883495, - "acc_norm_stderr": 0.049318019942204146 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4725415070242657, - "acc_stderr": 0.017852981266633955, - "acc_norm": 0.4725415070242657, - "acc_norm_stderr": 0.017852981266633955 - }, - 
"harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.43829787234042555, - "acc_stderr": 0.03243618636108101, - "acc_norm": 0.43829787234042555, - "acc_norm_stderr": 0.03243618636108101 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.41566265060240964, - "acc_stderr": 0.03836722176598052, - "acc_norm": 0.41566265060240964, - "acc_norm_stderr": 0.03836722176598052 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.4790996784565916, - "acc_stderr": 0.028373270961069414, - "acc_norm": 0.4790996784565916, - "acc_norm_stderr": 0.028373270961069414 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.39461883408071746, - "acc_stderr": 0.03280400504755292, - "acc_norm": 0.39461883408071746, - "acc_norm_stderr": 0.03280400504755292 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.44274809160305345, - "acc_stderr": 0.043564472026650695, - "acc_norm": 0.44274809160305345, - "acc_norm_stderr": 0.043564472026650695 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5555555555555556, - "acc_stderr": 0.035402943770953675, - "acc_norm": 0.5555555555555556, - "acc_norm_stderr": 0.035402943770953675 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.47586206896551725, - "acc_stderr": 0.04161808503501528, - "acc_norm": 0.47586206896551725, - "acc_norm_stderr": 0.04161808503501528 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.5084033613445378, - "acc_stderr": 0.03247390276569669, - "acc_norm": 0.5084033613445378, - "acc_norm_stderr": 0.03247390276569669 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.46153846153846156, - "acc_stderr": 0.025275892070240627, - "acc_norm": 0.46153846153846156, - "acc_norm_stderr": 0.025275892070240627 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.49074074074074076, - "acc_stderr": 0.04832853553437055, - "acc_norm": 0.49074074074074076, - "acc_norm_stderr": 0.04832853553437055 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.034991131376767445, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.034991131376767445 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47419354838709676, - "acc_stderr": 0.028406095057653315, - "acc_norm": 0.47419354838709676, - "acc_norm_stderr": 0.028406095057653315 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6581196581196581, - "acc_stderr": 0.031075028526507755, - "acc_norm": 0.6581196581196581, - "acc_norm_stderr": 0.031075028526507755 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 
0.4528301886792453, - "acc_stderr": 0.030635627957961823, - "acc_norm": 0.4528301886792453, - "acc_norm_stderr": 0.030635627957961823 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.04769300568972744, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.04769300568972744 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.027634907264178544, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.027634907264178544 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.6069651741293532, - "acc_stderr": 0.0345368246603156, - "acc_norm": 0.6069651741293532, - "acc_norm_stderr": 0.0345368246603156 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.35978835978835977, - "acc_stderr": 0.024718075944129277, - "acc_norm": 0.35978835978835977, - "acc_norm_stderr": 0.024718075944129277 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43641618497109824, - "acc_stderr": 0.026700545424943677, - "acc_norm": 0.43641618497109824, - "acc_norm_stderr": 0.026700545424943677 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4171779141104294, - "acc_stderr": 0.038741028598180814, - "acc_norm": 0.4171779141104294, - "acc_norm_stderr": 0.038741028598180814 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4567901234567901, - "acc_stderr": 0.02771666165019404, - "acc_norm": 0.4567901234567901, - "acc_norm_stderr": 0.02771666165019404 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.44041450777202074, - "acc_stderr": 0.03582724530036094, - "acc_norm": 0.44041450777202074, - "acc_norm_stderr": 0.03582724530036094 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.043727482902780085, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.043727482902780085 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47706422018348627, - "acc_stderr": 0.021414757058175506, - "acc_norm": 0.47706422018348627, - "acc_norm_stderr": 0.021414757058175506 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3253968253968254, - "acc_stderr": 0.04190596438871136, - "acc_norm": 0.3253968253968254, - "acc_norm_stderr": 0.04190596438871136 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.46405228758169936, - "acc_stderr": 0.02855582751652879, - "acc_norm": 0.46405228758169936, - "acc_norm_stderr": 0.02855582751652879 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.44, - "acc_stderr": 
0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6033057851239669, - "acc_stderr": 0.044658697805310094, - "acc_norm": 0.6033057851239669, - "acc_norm_stderr": 0.044658697805310094 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4605263157894737, - "acc_stderr": 0.04056242252249033, - "acc_norm": 0.4605263157894737, - "acc_norm_stderr": 0.04056242252249033 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.01965992249362335, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.01965992249362335 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35106382978723405, - "acc_stderr": 0.028473501272963768, - "acc_norm": 0.35106382978723405, - "acc_norm_stderr": 0.028473501272963768 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.42592592592592593, - "acc_stderr": 0.03372343271653063, - "acc_norm": 0.42592592592592593, - "acc_norm_stderr": 0.03372343271653063 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26927374301675977, - "acc_stderr": 0.014835616582882611, - "acc_norm": 0.26927374301675977, - "acc_norm_stderr": 0.014835616582882611 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.61, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.61, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4264705882352941, - "acc_stderr": 0.030042615832714864, - "acc_norm": 0.4264705882352941, - "acc_norm_stderr": 0.030042615832714864 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5142857142857142, - "acc_stderr": 0.03199615232806286, - "acc_norm": 0.5142857142857142, - "acc_norm_stderr": 0.03199615232806286 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.459915611814346, - "acc_stderr": 0.03244246810187913, - "acc_norm": 0.459915611814346, - "acc_norm_stderr": 0.03244246810187913 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31421121251629724, - "acc_stderr": 0.011855911587048228, - "acc_norm": 0.31421121251629724, - "acc_norm_stderr": 0.011855911587048228 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.03374499356319354, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.03374499356319354 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.03815494308688929, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.03815494308688929 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.31701346389228885, - "mc1_stderr": 0.016289203374403396, - "mc2": 0.48915259522870574, - "mc2_stderr": 0.015572578945104198 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2664319248826291, - "acc_stderr": 0.01515474253336583, - "acc_norm": 0.3532863849765258, - "acc_norm_stderr": 0.016385310378526204 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - 
"harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mncai/mistral-ko-f-1871-ep1", - "model_sha": "1ab1ccefadb9c3e832b4d2018cf0220974f998b3", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json deleted file mode 100644 index 30d8b5187f50b1d173b160d106bba108a34103f0..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29948805460750855, - "acc_stderr": 0.01338502163731356, - "acc_norm": 0.35494880546075086, - "acc_norm_stderr": 0.013983036904094094 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38627763393746267, - "acc_stderr": 0.004859004184694615, - "acc_norm": 0.4993029277036447, - "acc_norm_stderr": 0.00498977656227611 - }, - 
"harness|ko_mmlu_world_religions|5": { - "acc": 0.28654970760233917, - "acc_stderr": 0.034678266857038245, - "acc_norm": 0.28654970760233917, - "acc_norm_stderr": 0.034678266857038245 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.280970625798212, - "acc_stderr": 0.016073127851221225, - "acc_norm": 0.280970625798212, - "acc_norm_stderr": 0.016073127851221225 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.02655698211783875, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.02655698211783875 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.031069390260789413, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.031069390260789413 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3054662379421222, - "acc_stderr": 0.026160584450140474, - "acc_norm": 0.3054662379421222, - "acc_norm_stderr": 0.026160584450140474 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.02715715047956382, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.02715715047956382 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.26262626262626265, - "acc_stderr": 0.03135305009533084, - "acc_norm": 0.26262626262626265, - "acc_norm_stderr": 0.03135305009533084 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378949, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378949 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2184873949579832, - "acc_stderr": 0.026841514322958924, - "acc_norm": 0.2184873949579832, - "acc_norm_stderr": 0.026841514322958924 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2153846153846154, - "acc_stderr": 0.020843034557462878, - "acc_norm": 0.2153846153846154, - "acc_norm_stderr": 0.020843034557462878 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - 
"acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2645161290322581, - "acc_stderr": 0.02509189237885928, - "acc_norm": 0.2645161290322581, - "acc_norm_stderr": 0.02509189237885928 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2606837606837607, - "acc_stderr": 0.028760348956523414, - "acc_norm": 0.2606837606837607, - "acc_norm_stderr": 0.028760348956523414 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2339622641509434, - "acc_stderr": 0.02605529690115292, - "acc_norm": 0.2339622641509434, - "acc_norm_stderr": 0.02605529690115292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916714, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916714 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.02264421261552521, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.02264421261552521 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3092485549132948, - "acc_stderr": 0.02488314057007176, - "acc_norm": 0.3092485549132948, - "acc_norm_stderr": 0.02488314057007176 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868038, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868038 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22568807339449543, - "acc_stderr": 
0.01792308766780305, - "acc_norm": 0.22568807339449543, - "acc_norm_stderr": 0.01792308766780305 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.14285714285714285, - "acc_stderr": 0.03129843185743809, - "acc_norm": 0.14285714285714285, - "acc_norm_stderr": 0.03129843185743809 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.26143790849673204, - "acc_stderr": 0.025160998214292456, - "acc_norm": 0.26143790849673204, - "acc_norm_stderr": 0.025160998214292456 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.038035102483515854, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.038035102483515854 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307854, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307854 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093936, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093936 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.02257177102549475, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.02257177102549475 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788167, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.31645569620253167, - "acc_stderr": 0.030274974880218977, - "acc_norm": 0.31645569620253167, - "acc_norm_stderr": 0.030274974880218977 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2692307692307692, - "acc_stderr": 0.011328734403140327, - "acc_norm": 0.2692307692307692, - "acc_norm_stderr": 0.011328734403140327 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.03149328104507956, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.03149328104507956 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3515151515151515, - "acc_stderr": 0.037282069986826503, - "acc_norm": 0.3515151515151515, - "acc_norm_stderr": 0.037282069986826503 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 
0.24112607099143207, - "mc1_stderr": 0.014974827279752337, - "mc2": 0.3942593710384486, - "mc2_stderr": 0.014811018314989769 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7312206572769953, - "acc_stderr": 0.015196983421381498, - "acc_norm": 0.7769953051643192, - "acc_norm_stderr": 0.014269258984221404 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", - "model_sha": "ab414980e53f3a34d9414be8e2452d4eef35f3cd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json deleted file mode 100644 index 
1549365b2e2e2c7702cfcd19b9669703916e52fa..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.29948805460750855, - "acc_stderr": 0.01338502163731356, - "acc_norm": 0.35494880546075086, - "acc_norm_stderr": 0.013983036904094094 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38627763393746267, - "acc_stderr": 0.004859004184694615, - "acc_norm": 0.4993029277036447, - "acc_norm_stderr": 0.00498977656227611 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.28654970760233917, - "acc_stderr": 0.034678266857038245, - "acc_norm": 0.28654970760233917, - "acc_norm_stderr": 0.034678266857038245 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.280970625798212, - "acc_stderr": 0.016073127851221225, - "acc_norm": 0.280970625798212, - "acc_norm_stderr": 0.016073127851221225 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3851851851851852, - "acc_stderr": 0.042039210401562783, - "acc_norm": 0.3851851851851852, - "acc_norm_stderr": 0.042039210401562783 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.02655698211783875, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.02655698211783875 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.031069390260789413, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.031069390260789413 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3054662379421222, - "acc_stderr": 0.026160584450140474, - "acc_norm": 0.3054662379421222, - "acc_norm_stderr": 0.026160584450140474 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2062780269058296, - "acc_stderr": 0.02715715047956382, - "acc_norm": 0.2062780269058296, - "acc_norm_stderr": 0.02715715047956382 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.26262626262626265, - "acc_stderr": 0.03135305009533084, - "acc_norm": 0.26262626262626265, - "acc_norm_stderr": 0.03135305009533084 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378949, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378949 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237657, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237657 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2184873949579832, - "acc_stderr": 0.026841514322958924, - "acc_norm": 0.2184873949579832, - "acc_norm_stderr": 0.026841514322958924 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2153846153846154, - "acc_stderr": 0.020843034557462878, - "acc_norm": 0.2153846153846154, - "acc_norm_stderr": 0.020843034557462878 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252627, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252627 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2955665024630542, - "acc_stderr": 0.032104944337514575, - "acc_norm": 0.2955665024630542, - "acc_norm_stderr": 0.032104944337514575 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2645161290322581, - "acc_stderr": 0.02509189237885928, - "acc_norm": 0.2645161290322581, - "acc_norm_stderr": 0.02509189237885928 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2606837606837607, - "acc_stderr": 0.028760348956523414, - "acc_norm": 0.2606837606837607, - "acc_norm_stderr": 0.028760348956523414 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2339622641509434, - "acc_stderr": 0.02605529690115292, - "acc_norm": 0.2339622641509434, - "acc_norm_stderr": 0.02605529690115292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.20909090909090908, - "acc_stderr": 0.03895091015724135, - "acc_norm": 0.20909090909090908, - "acc_norm_stderr": 0.03895091015724135 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969653, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969653 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24875621890547264, - "acc_stderr": 0.030567675938916714, - "acc_norm": 0.24875621890547264, - "acc_norm_stderr": 0.030567675938916714 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818317, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818317 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.02264421261552521, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.02264421261552521 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3092485549132948, - "acc_stderr": 0.02488314057007176, - "acc_norm": 0.3092485549132948, - "acc_norm_stderr": 0.02488314057007176 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868038, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868038 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 
0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22797927461139897, - "acc_stderr": 0.030276909945178256, - "acc_norm": 0.22797927461139897, - "acc_norm_stderr": 0.030276909945178256 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22568807339449543, - "acc_stderr": 0.01792308766780305, - "acc_norm": 0.22568807339449543, - "acc_norm_stderr": 0.01792308766780305 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.14285714285714285, - "acc_stderr": 0.03129843185743809, - "acc_norm": 0.14285714285714285, - "acc_norm_stderr": 0.03129843185743809 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.26143790849673204, - "acc_stderr": 0.025160998214292456, - "acc_norm": 0.26143790849673204, - "acc_norm_stderr": 0.025160998214292456 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322674, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322674 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.038035102483515854, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.038035102483515854 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2761437908496732, - "acc_stderr": 0.018087276935663133, - "acc_norm": 0.2761437908496732, - "acc_norm_stderr": 0.018087276935663133 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2730496453900709, - "acc_stderr": 0.026577860943307854, - "acc_norm": 0.2730496453900709, - "acc_norm_stderr": 0.026577860943307854 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952689, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952689 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.20833333333333334, - "acc_stderr": 0.027696910713093936, - "acc_norm": 0.20833333333333334, - "acc_norm_stderr": 0.027696910713093936 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.16544117647058823, - "acc_stderr": 0.02257177102549475, - "acc_norm": 0.16544117647058823, - "acc_norm_stderr": 0.02257177102549475 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788167, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788167 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.31645569620253167, - "acc_stderr": 0.030274974880218977, - "acc_norm": 0.31645569620253167, - "acc_norm_stderr": 0.030274974880218977 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 
0.2692307692307692, - "acc_stderr": 0.011328734403140327, - "acc_norm": 0.2692307692307692, - "acc_norm_stderr": 0.011328734403140327 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.03149328104507956, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.03149328104507956 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3515151515151515, - "acc_stderr": 0.037282069986826503, - "acc_norm": 0.3515151515151515, - "acc_norm_stderr": 0.037282069986826503 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752337, - "mc2": 0.3942593710384486, - "mc2_stderr": 0.014811018314989769 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7312206572769953, - "acc_stderr": 0.015196983421381498, - "acc_norm": 0.7769953051643192, - "acc_norm_stderr": 0.014269258984221404 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - 
"harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", - "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json deleted file mode 100644 index 2be382d0fe0653631aa95ed884ec9c4e14fd468c..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30119453924914674, - "acc_stderr": 0.013406741767847612, - "acc_norm": 0.3361774744027304, - "acc_norm_stderr": 0.013804855026205763 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38717386974706236, - "acc_stderr": 0.0048610845340870314, - "acc_norm": 0.5007966540529775, - "acc_norm_stderr": 0.0049897750778356564 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3216374269005848, - "acc_stderr": 0.03582529442573122, - "acc_norm": 0.3216374269005848, - "acc_norm_stderr": 0.03582529442573122 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.30140485312899107, - "acc_stderr": 0.016409091097268798, - "acc_norm": 0.30140485312899107, - "acc_norm_stderr": 0.016409091097268798 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996794, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996794 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342343, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342343 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.0332939411907353, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.0332939411907353 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3536977491961415, - "acc_stderr": 0.027155208103200868, - "acc_norm": 0.3536977491961415, - "acc_norm_stderr": 0.027155208103200868 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.03768335959728743, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.03768335959728743 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30808080808080807, - "acc_stderr": 0.032894773300986155, - "acc_norm": 0.30808080808080807, - "acc_norm_stderr": 0.032894773300986155 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.03921545312467122, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.03921545312467122 - }, - 
"harness|ko_mmlu_college_physics|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.04617034827006717, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.04617034827006717 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.28991596638655465, - "acc_stderr": 0.029472485833136112, - "acc_norm": 0.28991596638655465, - "acc_norm_stderr": 0.029472485833136112 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28974358974358977, - "acc_stderr": 0.02300062824368797, - "acc_norm": 0.28974358974358977, - "acc_norm_stderr": 0.02300062824368797 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.042365112580946336, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.042365112580946336 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.30049261083743845, - "acc_stderr": 0.03225799476233485, - "acc_norm": 0.30049261083743845, - "acc_norm_stderr": 0.03225799476233485 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25806451612903225, - "acc_stderr": 0.02489246917246283, - "acc_norm": 0.25806451612903225, - "acc_norm_stderr": 0.02489246917246283 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.32051282051282054, - "acc_stderr": 0.03057281131029961, - "acc_norm": 0.32051282051282054, - "acc_norm_stderr": 0.03057281131029961 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.027495663683724046, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.027495663683724046 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.04122066502878285, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.04122066502878285 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371217, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371217 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199966, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199966 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2835820895522388, - "acc_stderr": 0.03187187537919798, - "acc_norm": 0.2835820895522388, - "acc_norm_stderr": 0.03187187537919798 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.033450369167889925, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.033450369167889925 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776568, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776568 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - 
"harness|ko_mmlu_moral_disputes|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.02572280220089582, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.02572280220089582 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924055, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924055 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868045, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868045 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565319, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565319 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748142, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748142 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26788990825688075, - "acc_stderr": 0.01898746225797865, - "acc_norm": 0.26788990825688075, - "acc_norm_stderr": 0.01898746225797865 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392871, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392871 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.02718449890994162, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.02718449890994162 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909281, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909281 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2231404958677686, - "acc_stderr": 0.03800754475228733, - "acc_norm": 0.2231404958677686, - "acc_norm_stderr": 0.03800754475228733 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.030643607071677098, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.030643607071677098 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.01840341571010979, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.01840341571010979 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.02624492034984301, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.02624492034984301 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044792, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044792 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2659217877094972, - "acc_stderr": 0.014776765066438893, - "acc_norm": 0.2659217877094972, - "acc_norm_stderr": 0.014776765066438893 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - 
"harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3161764705882353, - "acc_stderr": 0.02824568739146292, - "acc_norm": 0.3161764705882353, - "acc_norm_stderr": 0.02824568739146292 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.33877551020408164, - "acc_stderr": 0.03029950656215418, - "acc_norm": 0.33877551020408164, - "acc_norm_stderr": 0.03029950656215418 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.379746835443038, - "acc_stderr": 0.031591887529658504, - "acc_norm": 0.379746835443038, - "acc_norm_stderr": 0.031591887529658504 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31747066492829207, - "acc_stderr": 0.011888892068809309, - "acc_norm": 0.31747066492829207, - "acc_norm_stderr": 0.011888892068809309 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.03354092437591518, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.03354092437591518 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715, - "mc2": 0.3985963285925064, - "mc2_stderr": 0.015472671255231826 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7276995305164319, - "acc_stderr": 0.015259329900235694, - "acc_norm": 0.7629107981220657, - "acc_norm_stderr": 0.014579008468781534 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 
1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", - "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json deleted file mode 100644 index a81cdf9213001414a8f061eae0c50acdfce76952..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.30119453924914674, - "acc_stderr": 0.013406741767847612, - "acc_norm": 0.33532423208191126, - "acc_norm_stderr": 0.013796182947785562 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38707428799044014, - "acc_stderr": 0.004860854240821967, - "acc_norm": 0.5005974905397331, - "acc_norm_stderr": 0.004989777848791005 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.32748538011695905, - "acc_stderr": 0.035993357714560276, - "acc_norm": 0.32748538011695905, - "acc_norm_stderr": 0.035993357714560276 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3001277139208174, - "acc_stderr": 0.016389249691317425, - "acc_norm": 0.3001277139208174, - "acc_norm_stderr": 0.016389249691317425 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.04094376269996794, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.04094376269996794 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342343, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342343 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.0332939411907353, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.0332939411907353 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3536977491961415, - "acc_stderr": 0.027155208103200868, - "acc_norm": 0.3536977491961415, - "acc_norm_stderr": 0.027155208103200868 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19730941704035873, - "acc_stderr": 0.02670985334496796, - "acc_norm": 0.19730941704035873, - "acc_norm_stderr": 0.02670985334496796 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.03768335959728743, - "acc_norm": 
0.24427480916030533, - "acc_norm_stderr": 0.03768335959728743 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036846, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036846 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30808080808080807, - "acc_stderr": 0.032894773300986155, - "acc_norm": 0.30808080808080807, - "acc_norm_stderr": 0.032894773300986155 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.03921545312467122, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.03921545312467122 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.04617034827006717, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.04617034827006717 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.28991596638655465, - "acc_stderr": 0.029472485833136112, - "acc_norm": 0.28991596638655465, - "acc_norm_stderr": 0.029472485833136112 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28974358974358977, - "acc_stderr": 0.02300062824368797, - "acc_norm": 0.28974358974358977, - "acc_norm_stderr": 0.02300062824368797 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.042365112580946336, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.042365112580946336 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.30049261083743845, - "acc_stderr": 0.03225799476233485, - "acc_norm": 0.30049261083743845, - "acc_norm_stderr": 0.03225799476233485 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.25806451612903225, - "acc_stderr": 0.02489246917246283, - "acc_norm": 0.25806451612903225, - "acc_norm_stderr": 0.02489246917246283 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.32051282051282054, - "acc_stderr": 0.03057281131029961, - "acc_norm": 0.32051282051282054, - "acc_norm_stderr": 0.03057281131029961 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.027495663683724046, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.027495663683724046 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.04122066502878285, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.04122066502878285 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371217, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371217 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.037101857261199966, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.037101857261199966 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2835820895522388, - "acc_stderr": 0.03187187537919798, - "acc_norm": 0.2835820895522388, - "acc_norm_stderr": 0.03187187537919798 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.033687629322594316, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.033687629322594316 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.022789673145776568, - 
"acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.022789673145776568 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.35260115606936415, - "acc_stderr": 0.02572280220089582, - "acc_norm": 0.35260115606936415, - "acc_norm_stderr": 0.02572280220089582 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924055, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924055 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30246913580246915, - "acc_stderr": 0.025557653981868045, - "acc_norm": 0.30246913580246915, - "acc_norm_stderr": 0.025557653981868045 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565319, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565319 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748142, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748142 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26788990825688075, - "acc_stderr": 0.01898746225797865, - "acc_norm": 0.26788990825688075, - "acc_norm_stderr": 0.01898746225797865 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392871, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392871 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3431372549019608, - "acc_stderr": 0.02718449890994162, - "acc_norm": 0.3431372549019608, - "acc_norm_stderr": 0.02718449890994162 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909281, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909281 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2231404958677686, - "acc_stderr": 0.03800754475228733, - "acc_norm": 0.2231404958677686, - "acc_norm_stderr": 0.03800754475228733 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.030643607071677098, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.030643607071677098 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29248366013071897, - "acc_stderr": 0.01840341571010979, - "acc_norm": 0.29248366013071897, - "acc_norm_stderr": 0.01840341571010979 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.02624492034984301, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.02624492034984301 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03894641120044792, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03894641120044792 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - 
"acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.264804469273743, - "acc_stderr": 0.014756906483260664, - "acc_norm": 0.264804469273743, - "acc_norm_stderr": 0.014756906483260664 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3161764705882353, - "acc_stderr": 0.02824568739146292, - "acc_norm": 0.3161764705882353, - "acc_norm_stderr": 0.02824568739146292 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.33877551020408164, - "acc_stderr": 0.03029950656215418, - "acc_norm": 0.33877551020408164, - "acc_norm_stderr": 0.03029950656215418 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.379746835443038, - "acc_stderr": 0.031591887529658504, - "acc_norm": 0.379746835443038, - "acc_norm_stderr": 0.031591887529658504 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.31877444589308995, - "acc_stderr": 0.01190189563578609, - "acc_norm": 0.31877444589308995, - "acc_norm_stderr": 0.01190189563578609 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.03354092437591518, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.03354092437591518 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.03697442205031596, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.03697442205031596 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24479804161566707, - "mc1_stderr": 0.015051869486715, - "mc2": 0.39860268740922694, - "mc2_stderr": 0.015473079108834439 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7276995305164319, - "acc_stderr": 0.015259329900235694, - "acc_norm": 0.7629107981220657, - "acc_norm_stderr": 0.014579008468781534 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - 
"harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", - "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json deleted file mode 100644 index 8f12834a39172ed468c8d025f13558a8cb1b24ef..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32081911262798635, - "acc_stderr": 0.013640943091946524, - "acc_norm": 0.37457337883959047, - "acc_norm_stderr": 0.014144193471893446 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3913563035251942, - "acc_stderr": 0.004870563921220623, - "acc_norm": 0.5044811790479984, - "acc_norm_stderr": 0.004989581008163209 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.033773102522091925, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.033773102522091925 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3231162196679438, - "acc_stderr": 0.016723726512343048, - "acc_norm": 0.3231162196679438, - "acc_norm_stderr": 0.016723726512343048 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.041539484047424004, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424004 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514178, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514178 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - 
"acc_stderr": 0.031417842916639245, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.031417842916639245 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33762057877813506, - "acc_stderr": 0.026858825879488547, - "acc_norm": 0.33762057877813506, - "acc_norm_stderr": 0.026858825879488547 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21076233183856502, - "acc_stderr": 0.027373095500540193, - "acc_norm": 0.21076233183856502, - "acc_norm_stderr": 0.027373095500540193 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462202, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462202 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.034812853382329624, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.034812853382329624 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.038312260488503336, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.038312260488503336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29831932773109243, - "acc_stderr": 0.02971914287634284, - "acc_norm": 0.29831932773109243, - "acc_norm_stderr": 0.02971914287634284 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35128205128205126, - "acc_stderr": 0.024203665177902796, - "acc_norm": 0.35128205128205126, - "acc_norm_stderr": 0.024203665177902796 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04557239513497752, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04557239513497752 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970187, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970187 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466776, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466776 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.33760683760683763, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.33760683760683763, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.29056603773584905, - "acc_stderr": 0.027943219989337142, - "acc_norm": 0.29056603773584905, - "acc_norm_stderr": 0.027943219989337142 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.043091187099464585, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.043091187099464585 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - 
"acc_stderr": 0.03658603262763744, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763744 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.31343283582089554, - "acc_stderr": 0.03280188205348644, - "acc_norm": 0.31343283582089554, - "acc_norm_stderr": 0.03280188205348644 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.03550683989165582, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.03550683989165582 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.023068188848261124, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.023068188848261124 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3901734104046243, - "acc_stderr": 0.026261677607806642, - "acc_norm": 0.3901734104046243, - "acc_norm_stderr": 0.026261677607806642 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3117283950617284, - "acc_stderr": 0.02577311116963044, - "acc_norm": 0.3117283950617284, - "acc_norm_stderr": 0.02577311116963044 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.03480175668466036, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.03480175668466036 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3431192660550459, - "acc_stderr": 0.020354777736086037, - "acc_norm": 0.3431192660550459, - "acc_norm_stderr": 0.020354777736086037 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027914055510468008, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027914055510468008 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.041733491480834974, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.041733491480834974 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.037150621549989056, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.037150621549989056 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2826797385620915, - "acc_stderr": 0.018217269552053435, - "acc_norm": 
0.2826797385620915, - "acc_norm_stderr": 0.018217269552053435 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290403, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290403 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.1875, - "acc_stderr": 0.0370468111477387, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.0370468111477387 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.029711275860005333, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.029711275860005333 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.01431099954796145, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.01431099954796145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33088235294117646, - "acc_stderr": 0.028582709753898445, - "acc_norm": 0.33088235294117646, - "acc_norm_stderr": 0.028582709753898445 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.028666857790274655, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.028666857790274655 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.42616033755274263, - "acc_stderr": 0.032190357031317736, - "acc_norm": 0.42616033755274263, - "acc_norm_stderr": 0.032190357031317736 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3161668839634941, - "acc_stderr": 0.011875780894386578, - "acc_norm": 0.3161668839634941, - "acc_norm_stderr": 0.011875780894386578 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.03343311240488418, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.03343311240488418 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.037937131711656344, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.037937131711656344 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2778457772337821, - "mc1_stderr": 0.01568092936402464, - "mc2": 0.4401386616406487, - "mc2_stderr": 0.015231170871530949 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5199530516431925, - "acc_stderr": 0.01712612629283198, - "acc_norm": 0.5751173708920188, - "acc_norm_stderr": 0.0169452488268217 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - 
"harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2", - "model_sha": "fe0117824036ebe2d054ddf14b2ef04a1cb19dda", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:44.json b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:44.json deleted file mode 100644 index dcc3b7d18c7869d5de6480056e211cb8062b75bc..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:44.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3216723549488055, - "acc_stderr": 0.013650488084494164, - "acc_norm": 0.37457337883959047, - "acc_norm_stderr": 0.014144193471893446 - }, - "harness|ko_hellaswag|10": { - "acc": 0.391256721768572, - "acc_stderr": 0.004870342592915048, - "acc_norm": 0.5045807608046206, - "acc_norm_stderr": 0.004989572002196686 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.033773102522091925, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.033773102522091925 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3231162196679438, - "acc_stderr": 0.016723726512343048, - "acc_norm": 0.3231162196679438, - "acc_norm_stderr": 0.016723726512343048 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 
0.362962962962963, - "acc_stderr": 0.041539484047424004, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.041539484047424004 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514178, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514178 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.031417842916639245, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.031417842916639245 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33762057877813506, - "acc_stderr": 0.026858825879488547, - "acc_norm": 0.33762057877813506, - "acc_norm_stderr": 0.026858825879488547 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21076233183856502, - "acc_stderr": 0.027373095500540193, - "acc_norm": 0.21076233183856502, - "acc_norm_stderr": 0.027373095500540193 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462202, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462202 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.034812853382329624, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.034812853382329624 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.038312260488503336, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.038312260488503336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29831932773109243, - "acc_stderr": 0.02971914287634284, - "acc_norm": 0.29831932773109243, - "acc_norm_stderr": 0.02971914287634284 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35128205128205126, - "acc_stderr": 0.024203665177902796, - "acc_norm": 0.35128205128205126, - "acc_norm_stderr": 0.024203665177902796 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04557239513497752, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04557239513497752 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 0.03269080871970187, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970187 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466776, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466776 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.33760683760683763, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.33760683760683763, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.29056603773584905, - "acc_stderr": 
0.027943219989337142, - "acc_norm": 0.29056603773584905, - "acc_norm_stderr": 0.027943219989337142 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.043091187099464585, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.043091187099464585 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763744, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763744 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.31343283582089554, - "acc_stderr": 0.03280188205348644, - "acc_norm": 0.31343283582089554, - "acc_norm_stderr": 0.03280188205348644 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.03550683989165582, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.03550683989165582 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.023068188848261124, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.023068188848261124 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3901734104046243, - "acc_stderr": 0.026261677607806642, - "acc_norm": 0.3901734104046243, - "acc_norm_stderr": 0.026261677607806642 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3117283950617284, - "acc_stderr": 0.02577311116963044, - "acc_norm": 0.3117283950617284, - "acc_norm_stderr": 0.02577311116963044 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.03480175668466036, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.03480175668466036 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3431192660550459, - "acc_stderr": 0.020354777736086037, - "acc_norm": 0.3431192660550459, - "acc_norm_stderr": 0.020354777736086037 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027914055510468008, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027914055510468008 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - 
"acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.041733491480834974, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.041733491480834974 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.037150621549989056, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.037150621549989056 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2826797385620915, - "acc_stderr": 0.018217269552053435, - "acc_norm": 0.2826797385620915, - "acc_norm_stderr": 0.018217269552053435 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290403, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290403 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.1875, - "acc_stderr": 0.0370468111477387, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.0370468111477387 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.029711275860005333, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.029711275860005333 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.01431099954796145, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.01431099954796145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.33088235294117646, - "acc_stderr": 0.028582709753898445, - "acc_norm": 0.33088235294117646, - "acc_norm_stderr": 0.028582709753898445 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.27755102040816326, - "acc_stderr": 0.028666857790274655, - "acc_norm": 0.27755102040816326, - "acc_norm_stderr": 0.028666857790274655 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.42616033755274263, - "acc_stderr": 0.032190357031317736, - "acc_norm": 0.42616033755274263, - "acc_norm_stderr": 0.032190357031317736 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3161668839634941, - "acc_stderr": 0.011875780894386578, - "acc_norm": 0.3161668839634941, - "acc_norm_stderr": 0.011875780894386578 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.03343311240488418, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.03343311240488418 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.037937131711656344, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.037937131711656344 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2778457772337821, - "mc1_stderr": 0.01568092936402464, - "mc2": 0.44014473628399425, - "mc2_stderr": 0.015231766341297711 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5211267605633803, - "acc_stderr": 0.017124472080967065, - "acc_norm": 0.573943661971831, - "acc_norm_stderr": 0.016951313945591816 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - 
"harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge", - "model_sha": "fe0117824036ebe2d054ddf14b2ef04a1cb19dda", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:53.json b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:53.json deleted file mode 100644 index 9a0ee7a8a2c50b17ab4bd55d95e9450c57018e79..0000000000000000000000000000000000000000 --- a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge/result_2023-10-06 11:30:53.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3216723549488055, - "acc_stderr": 0.013650488084494164, - "acc_norm": 0.37457337883959047, - "acc_norm_stderr": 0.014144193471893446 - }, - "harness|ko_hellaswag|10": { - "acc": 0.391256721768572, - "acc_stderr": 0.004870342592915048, - "acc_norm": 0.5045807608046206, - "acc_norm_stderr": 0.004989572002196686 - }, - "harness|ko_mmlu_world_religions|5": 
{ - "acc": 0.2631578947368421, - "acc_stderr": 0.033773102522091925, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.033773102522091925 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.046202840822800406, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.046202840822800406 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3231162196679438, - "acc_stderr": 0.016723726512343048, - "acc_norm": 0.3231162196679438, - "acc_norm_stderr": 0.016723726512343048 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.35555555555555557, - "acc_stderr": 0.04135176749720386, - "acc_norm": 0.35555555555555557, - "acc_norm_stderr": 0.04135176749720386 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.028504856470514178, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.028504856470514178 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21084337349397592, - "acc_stderr": 0.031755547866299194, - "acc_norm": 0.21084337349397592, - "acc_norm_stderr": 0.031755547866299194 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.33762057877813506, - "acc_stderr": 0.026858825879488547, - "acc_norm": 0.33762057877813506, - "acc_norm_stderr": 0.026858825879488547 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21076233183856502, - "acc_stderr": 0.027373095500540193, - "acc_norm": 0.21076233183856502, - "acc_norm_stderr": 0.027373095500540193 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462202, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462202 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3939393939393939, - "acc_stderr": 0.034812853382329624, - "acc_norm": 0.3939393939393939, - "acc_norm_stderr": 0.034812853382329624 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.30344827586206896, - "acc_stderr": 0.038312260488503336, - "acc_norm": 0.30344827586206896, - "acc_norm_stderr": 0.038312260488503336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29831932773109243, - "acc_stderr": 0.02971914287634284, - "acc_norm": 0.29831932773109243, - "acc_norm_stderr": 0.02971914287634284 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35128205128205126, - "acc_stderr": 0.024203665177902796, - "acc_norm": 0.35128205128205126, - "acc_norm_stderr": 0.024203665177902796 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04557239513497752, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04557239513497752 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.31527093596059114, - "acc_stderr": 
0.03269080871970187, - "acc_norm": 0.31527093596059114, - "acc_norm_stderr": 0.03269080871970187 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.02652270967466776, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.02652270967466776 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.33760683760683763, - "acc_stderr": 0.030980296992618558, - "acc_norm": 0.33760683760683763, - "acc_norm_stderr": 0.030980296992618558 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.29056603773584905, - "acc_stderr": 0.027943219989337142, - "acc_norm": 0.29056603773584905, - "acc_norm_stderr": 0.027943219989337142 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2818181818181818, - "acc_stderr": 0.043091187099464585, - "acc_norm": 0.2818181818181818, - "acc_norm_stderr": 0.043091187099464585 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.24814814814814815, - "acc_stderr": 0.0263357394040558, - "acc_norm": 0.24814814814814815, - "acc_norm_stderr": 0.0263357394040558 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2781456953642384, - "acc_stderr": 0.03658603262763744, - "acc_norm": 0.2781456953642384, - "acc_norm_stderr": 0.03658603262763744 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.30845771144278605, - "acc_stderr": 0.032658195885126994, - "acc_norm": 0.30845771144278605, - "acc_norm_stderr": 0.032658195885126994 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.03550683989165582, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.03550683989165582 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.023068188848261124, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.023068188848261124 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.3901734104046243, - "acc_stderr": 0.026261677607806642, - "acc_norm": 0.3901734104046243, - "acc_norm_stderr": 0.026261677607806642 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.36809815950920244, - "acc_stderr": 0.03789213935838396, - "acc_norm": 0.36809815950920244, - "acc_norm_stderr": 0.03789213935838396 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3117283950617284, - "acc_stderr": 0.02577311116963044, - "acc_norm": 0.3117283950617284, - "acc_norm_stderr": 0.02577311116963044 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.03480175668466036, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.03480175668466036 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022057, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022057 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3431192660550459, - "acc_stderr": 
0.020354777736086037, - "acc_norm": 0.3431192660550459, - "acc_norm_stderr": 0.020354777736086037 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027914055510468008, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027914055510468008 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2975206611570248, - "acc_stderr": 0.041733491480834974, - "acc_norm": 0.2975206611570248, - "acc_norm_stderr": 0.041733491480834974 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.037150621549989056, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.037150621549989056 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2826797385620915, - "acc_stderr": 0.018217269552053435, - "acc_norm": 0.2826797385620915, - "acc_norm_stderr": 0.018217269552053435 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24822695035460993, - "acc_stderr": 0.025770015644290403, - "acc_norm": 0.24822695035460993, - "acc_norm_stderr": 0.025770015644290403 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.1875, - "acc_stderr": 0.0370468111477387, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.0370468111477387 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.25462962962962965, - "acc_stderr": 0.029711275860005333, - "acc_norm": 0.25462962962962965, - "acc_norm_stderr": 0.029711275860005333 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.01431099954796145, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.01431099954796145 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3272058823529412, - "acc_stderr": 0.02850145286039656, - "acc_norm": 0.3272058823529412, - "acc_norm_stderr": 0.02850145286039656 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2816326530612245, - "acc_stderr": 0.02879518557429127, - "acc_norm": 0.2816326530612245, - "acc_norm_stderr": 0.02879518557429127 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.42616033755274263, - "acc_stderr": 0.032190357031317736, - "acc_norm": 0.42616033755274263, - "acc_norm_stderr": 0.032190357031317736 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3161668839634941, - "acc_stderr": 0.011875780894386578, - "acc_norm": 0.3161668839634941, - "acc_norm_stderr": 0.011875780894386578 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.03343311240488418, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.03343311240488418 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.037937131711656344, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.037937131711656344 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2778457772337821, - "mc1_stderr": 
0.01568092936402464, - "mc2": 0.44014473628399425, - "mc2_stderr": 0.015231766341297711 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5211267605633803, - "acc_stderr": 0.017124472080967065, - "acc_norm": 0.573943661971831, - "acc_norm_stderr": 0.016951313945591816 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge", - "model_sha": "fe0117824036ebe2d054ddf14b2ef04a1cb19dda", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json b/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json deleted file mode 100644 index c4d243ddd9a8f957d2ee1d642375a2d1ef9006a5..0000000000000000000000000000000000000000 --- 
a/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21245733788395904, - "acc_stderr": 0.01195348290658295, - "acc_norm": 0.2431740614334471, - "acc_norm_stderr": 0.012536554144587096 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2568213503286198, - "acc_stderr": 0.00435987151963954, - "acc_norm": 0.27106154152559253, - "acc_norm_stderr": 0.00443599349258387 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.25146198830409355, - "acc_stderr": 0.033275044238468436, - "acc_norm": 0.25146198830409355, - "acc_norm_stderr": 0.033275044238468436 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.21839080459770116, - "acc_stderr": 0.014774358319934486, - "acc_norm": 0.21839080459770116, - "acc_norm_stderr": 0.014774358319934486 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.04024778401977112, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.04024778401977112 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.23829787234042554, - "acc_stderr": 0.027851252973889802, - "acc_norm": 0.23829787234042554, - "acc_norm_stderr": 0.027851252973889802 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.1927710843373494, - "acc_stderr": 0.03070982405056527, - "acc_norm": 0.1927710843373494, - "acc_norm_stderr": 0.03070982405056527 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2733118971061093, - "acc_stderr": 0.02531176597542612, - "acc_norm": 0.2733118971061093, - "acc_norm_stderr": 0.02531176597542612 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.13901345291479822, - "acc_stderr": 0.0232193528344745, - "acc_norm": 0.13901345291479822, - "acc_norm_stderr": 0.0232193528344745 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.32061068702290074, - "acc_stderr": 0.040933292298342784, - "acc_norm": 0.32061068702290074, - "acc_norm_stderr": 0.040933292298342784 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30808080808080807, - "acc_stderr": 0.03289477330098615, - "acc_norm": 0.30808080808080807, - "acc_norm_stderr": 0.03289477330098615 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2827586206896552, - "acc_stderr": 0.037528339580033376, - "acc_norm": 0.2827586206896552, - "acc_norm_stderr": 0.037528339580033376 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.04576665403207762, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.04576665403207762 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25210084033613445, - "acc_stderr": 0.02820554503327773, - "acc_norm": 0.25210084033613445, - "acc_norm_stderr": 0.02820554503327773 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.29743589743589743, - "acc_stderr": 0.023177408131465932, - "acc_norm": 0.29743589743589743, - "acc_norm_stderr": 0.023177408131465932 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - 
"acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.17592592592592593, - "acc_stderr": 0.036809181416738786, - "acc_norm": 0.17592592592592593, - "acc_norm_stderr": 0.036809181416738786 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.26108374384236455, - "acc_stderr": 0.030903796952114468, - "acc_norm": 0.26108374384236455, - "acc_norm_stderr": 0.030903796952114468 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3032258064516129, - "acc_stderr": 0.026148685930671746, - "acc_norm": 0.3032258064516129, - "acc_norm_stderr": 0.026148685930671746 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.028605953702004253, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.028605953702004253 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2339622641509434, - "acc_stderr": 0.02605529690115292, - "acc_norm": 0.2339622641509434, - "acc_norm_stderr": 0.02605529690115292 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.04350271442923243, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.04350271442923243 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.27037037037037037, - "acc_stderr": 0.027080372815145668, - "acc_norm": 0.27037037037037037, - "acc_norm_stderr": 0.027080372815145668 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.036848815213890225, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.036848815213890225 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.25870646766169153, - "acc_stderr": 0.03096590312357301, - "acc_norm": 0.25870646766169153, - "acc_norm_stderr": 0.03096590312357301 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2832369942196532, - "acc_stderr": 0.034355680560478746, - "acc_norm": 0.2832369942196532, - "acc_norm_stderr": 0.034355680560478746 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113935, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113935 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2745664739884393, - "acc_stderr": 0.02402774515526502, - "acc_norm": 0.2745664739884393, - "acc_norm_stderr": 0.02402774515526502 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02438366553103545, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02438366553103545 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - 
"harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3005181347150259, - "acc_stderr": 0.0330881859441575, - "acc_norm": 0.3005181347150259, - "acc_norm_stderr": 0.0330881859441575 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813344, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813344 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.27522935779816515, - "acc_stderr": 0.019149093743155203, - "acc_norm": 0.27522935779816515, - "acc_norm_stderr": 0.019149093743155203 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.041349130183033156, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.041349130183033156 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3006535947712418, - "acc_stderr": 0.02625605383571896, - "acc_norm": 0.3006535947712418, - "acc_norm_stderr": 0.02625605383571896 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036845, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036845 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.1652892561983471, - "acc_stderr": 0.03390780612972776, - "acc_norm": 0.1652892561983471, - "acc_norm_stderr": 0.03390780612972776 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316092, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316092 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.017077373377857, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.017077373377857 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22340425531914893, - "acc_stderr": 0.02484792135806396, - "acc_norm": 0.22340425531914893, - "acc_norm_stderr": 0.02484792135806396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.33796296296296297, - "acc_stderr": 0.03225941352631295, - "acc_norm": 0.33796296296296297, - "acc_norm_stderr": 0.03225941352631295 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26145251396648045, - "acc_stderr": 0.014696599650364555, - "acc_norm": 0.26145251396648045, - "acc_norm_stderr": 0.014696599650364555 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.027257202606114948, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.027257202606114948 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2979591836734694, - "acc_stderr": 0.02927956741106567, - "acc_norm": 0.2979591836734694, - "acc_norm_stderr": 0.02927956741106567 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2320675105485232, - "acc_stderr": 0.027479744550808503, - "acc_norm": 0.2320675105485232, - "acc_norm_stderr": 0.027479744550808503 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25358539765319427, - "acc_stderr": 0.011111715336101127, - "acc_norm": 0.25358539765319427, - "acc_norm_stderr": 
0.011111715336101127 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.03077855467869326, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.03077855467869326 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.03524390844511784, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.03524390844511784 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2178702570379437, - "mc1_stderr": 0.014450846714123911, - "mc2": 0.4610937921300059, - "mc2_stderr": 0.017090763627039533 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.0892018779342723, - "acc_stderr": 0.009770871054319065, - "acc_norm": 0.29460093896713613, - "acc_norm_stderr": 0.01562678805663152 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "mssongit/Koala-12.8b-v1", - "model_sha": 
"6e6754abd5a99c7984aa31eff410d3b8ee611ee8", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json b/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json deleted file mode 100644 index 3405ef63d2f1db4cc94fbe06bbb9cac634cdffec..0000000000000000000000000000000000000000 --- a/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.0136216961191733, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668525 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3826926906990639, - "acc_stderr": 0.004850508945116094, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.01690520742080355, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.01690520742080355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010602, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010602 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852732, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852732 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386236, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - 
"harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.022815813098896628, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.022815813098896628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.02622648565255388, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.02622648565255388 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36324786324786323, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.36324786324786323, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.037804458505267334, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.037804458505267334 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": 
{ - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.02604176620271716, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.02604176620271716 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022593, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022593 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.018054027458815194, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.018054027458815194 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.03266478331527272, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.03266478331527272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961438, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961438 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, 
- "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.031299208255302136, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.031299208255302136 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733095, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733095 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061566370146265, - "mc2_stderr": 0.014735163251703702 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31338028169014087, - "acc_stderr": 0.01590117396348767, - "acc_norm": 0.49413145539906106, - "acc_norm_stderr": 0.017138598632436264 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - 
"harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nakhyeon/llama-2-ko-qlora4", - "model_sha": "759cf82ec24f0bd625edfa916f22701d30517591", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json b/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json deleted file mode 100644 index 8137b8b4591b8a397ba0f4dbc3144c38324c2d06..0000000000000000000000000000000000000000 --- a/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.0136216961191733, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668525 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3826926906990639, - "acc_stderr": 0.004850508945116094, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.01690520742080355, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.01690520742080355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010602, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010602 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 
0.03488901616852732, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852732 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386236, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.022815813098896628, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.022815813098896628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.02622648565255388, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.02622648565255388 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36324786324786323, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.36324786324786323, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.037804458505267334, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.037804458505267334 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, 
- "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.02604176620271716, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.02604176620271716 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022593, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022593 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.018054027458815194, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.018054027458815194 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.03266478331527272, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.03266478331527272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961438, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961438 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - 
"acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.031299208255302136, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.031299208255302136 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733095, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733095 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061566370146265, - "mc2_stderr": 0.014735163251703702 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31338028169014087, - "acc_stderr": 0.01590117396348767, - "acc_norm": 0.49413145539906106, - "acc_norm_stderr": 0.017138598632436264 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 
1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nakhyeon/llama-ko-qlora-1024", - "model_sha": "10f5e7aa49eb466a26eb3c696b72fff0e003a954", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json b/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json deleted file mode 100644 index cad3d5e6b75fff5ba4bfb893cd4e54d374992058..0000000000000000000000000000000000000000 --- a/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.0136216961191733, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668525 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3826926906990639, - "acc_stderr": 0.004850508945116094, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.01690520742080355, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.01690520742080355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010602, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010602 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 
0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852732, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852732 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386236, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.022815813098896628, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.022815813098896628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.02622648565255388, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.02622648565255388 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36324786324786323, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.36324786324786323, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.037804458505267334, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.037804458505267334 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 
0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.02604176620271716, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.02604176620271716 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022593, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022593 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.018054027458815194, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.018054027458815194 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 
0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.03266478331527272, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.03266478331527272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961438, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961438 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.031299208255302136, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.031299208255302136 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733095, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733095 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061566370146265, - "mc2_stderr": 0.014735163251703702 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31338028169014087, - "acc_stderr": 0.01590117396348767, - "acc_norm": 0.49413145539906106, - "acc_norm_stderr": 0.017138598632436264 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - 
"harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nakhyeonn/llama-2-ko-qlora-prompt", - "model_sha": "3c10df72b42af16132ec1528e2892ef74b65ae4b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json deleted file mode 100644 index 13607cf0c1c18280c46a582c1f0223c01b2921d0..0000000000000000000000000000000000000000 --- a/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.0136216961191733, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668525 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3826926906990639, - "acc_stderr": 0.004850508945116094, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.01690520742080355, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.01690520742080355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 
0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010602, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010602 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852732, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852732 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386236, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.022815813098896628, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.022815813098896628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.02622648565255388, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.02622648565255388 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36324786324786323, - "acc_stderr": 0.03150712523091264, - "acc_norm": 0.36324786324786323, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, 
- "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.037804458505267334, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.037804458505267334 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.02604176620271716, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.02604176620271716 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022593, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022593 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - "acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - 
"harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.018054027458815194, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.018054027458815194 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.03266478331527272, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.03266478331527272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961438, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961438 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.031299208255302136, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.031299208255302136 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733095, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733095 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061566370146265, - "mc2_stderr": 0.014735163251703702 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31338028169014087, - "acc_stderr": 0.01590117396348767, - "acc_norm": 0.49413145539906106, - "acc_norm_stderr": 0.017138598632436264 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - 
"harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024", - "model_sha": "601927f98314ac016133b9e10a24de87dc97ff95", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json deleted file mode 100644 index 19fc2ae61f71fb88dd908ec33bdf94e104b930dd..0000000000000000000000000000000000000000 --- a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3191126279863481, - "acc_stderr": 0.0136216961191733, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668525 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3826926906990639, - "acc_stderr": 0.004850508945116094, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 
0.3371647509578544, - "acc_stderr": 0.01690520742080355, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.01690520742080355 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.02989614568209546, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.02989614568209546 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3433734939759036, - "acc_stderr": 0.03696584317010602, - "acc_norm": 0.3433734939759036, - "acc_norm_stderr": 0.03696584317010602 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.398989898989899, - "acc_stderr": 0.03488901616852732, - "acc_norm": 0.398989898989899, - "acc_norm_stderr": 0.03488901616852732 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.31724137931034485, - "acc_stderr": 0.038783523721386236, - "acc_norm": 0.31724137931034485, - "acc_norm_stderr": 0.038783523721386236 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31512605042016806, - "acc_stderr": 0.03017680828897434, - "acc_norm": 0.31512605042016806, - "acc_norm_stderr": 0.03017680828897434 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.022815813098896628, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.022815813098896628 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.02622648565255388, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.02622648565255388 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36324786324786323, - "acc_stderr": 0.03150712523091264, - "acc_norm": 
0.36324786324786323, - "acc_norm_stderr": 0.03150712523091264 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04607582090719976, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04607582090719976 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.037804458505267334, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.037804458505267334 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2543352601156069, - "acc_stderr": 0.0332055644308557, - "acc_norm": 0.2543352601156069, - "acc_norm_stderr": 0.0332055644308557 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.022418042891113942, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.022418042891113942 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.32407407407407407, - "acc_stderr": 0.02604176620271716, - "acc_norm": 0.32407407407407407, - "acc_norm_stderr": 0.02604176620271716 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27979274611398963, - "acc_stderr": 0.03239637046735704, - "acc_norm": 0.27979274611398963, - "acc_norm_stderr": 0.03239637046735704 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.381651376146789, - "acc_stderr": 0.020828148517022593, - "acc_norm": 0.381651376146789, - "acc_norm_stderr": 0.020828148517022593 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.369281045751634, - "acc_stderr": 0.02763417668960266, - "acc_norm": 0.369281045751634, - 
"acc_norm_stderr": 0.02763417668960266 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.018054027458815194, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.018054027458815194 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2801418439716312, - "acc_stderr": 0.026789172351140242, - "acc_norm": 0.2801418439716312, - "acc_norm_stderr": 0.026789172351140242 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35648148148148145, - "acc_stderr": 0.03266478331527272, - "acc_norm": 0.35648148148148145, - "acc_norm_stderr": 0.03266478331527272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24134078212290502, - "acc_stderr": 0.014310999547961438, - "acc_norm": 0.24134078212290502, - "acc_norm_stderr": 0.014310999547961438 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3628691983122363, - "acc_stderr": 0.031299208255302136, - "acc_norm": 0.3628691983122363, - "acc_norm_stderr": 0.031299208255302136 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733095, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733095 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.37061566370146265, - "mc2_stderr": 0.014735163251703702 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31338028169014087, - "acc_stderr": 0.01590117396348767, - "acc_norm": 0.49413145539906106, - "acc_norm_stderr": 0.017138598632436264 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - 
"harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024_new", - "model_sha": "4738337870d8e87a2f9a8aac64fcc6935d24afdc", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json b/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json deleted file mode 100644 index 9a432f71d39200662e73341eda618def4a2fb875..0000000000000000000000000000000000000000 --- a/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.32337883959044367, - "acc_stderr": 0.013669421630012122, - "acc_norm": 0.37372013651877134, - "acc_norm_stderr": 0.014137708601759091 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38548097988448515, - "acc_stderr": 0.004857140410776749, - "acc_norm": 
0.4992033459470225, - "acc_norm_stderr": 0.004989775077835649 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3216374269005848, - "acc_stderr": 0.03582529442573122, - "acc_norm": 0.3216374269005848, - "acc_norm_stderr": 0.03582529442573122 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2621359223300971, - "acc_stderr": 0.04354631077260595, - "acc_norm": 0.2621359223300971, - "acc_norm_stderr": 0.04354631077260595 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.388250319284802, - "acc_stderr": 0.017427673295544333, - "acc_norm": 0.388250319284802, - "acc_norm_stderr": 0.017427673295544333 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28936170212765955, - "acc_stderr": 0.029644006577009618, - "acc_norm": 0.28936170212765955, - "acc_norm_stderr": 0.029644006577009618 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3132530120481928, - "acc_stderr": 0.036108050180310235, - "acc_norm": 0.3132530120481928, - "acc_norm_stderr": 0.036108050180310235 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.36977491961414793, - "acc_stderr": 0.027417996705630998, - "acc_norm": 0.36977491961414793, - "acc_norm_stderr": 0.027417996705630998 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.031146796482972465, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.031146796482972465 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.042258754519696386, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.042258754519696386 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2828282828282828, - "acc_stderr": 0.03208779558786752, - "acc_norm": 0.2828282828282828, - "acc_norm_stderr": 0.03208779558786752 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3310344827586207, - "acc_stderr": 0.039215453124671215, - "acc_norm": 0.3310344827586207, - "acc_norm_stderr": 0.039215453124671215 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25210084033613445, - "acc_stderr": 0.028205545033277726, - "acc_norm": 0.25210084033613445, - "acc_norm_stderr": 0.028205545033277726 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2282051282051282, - "acc_stderr": 0.021278393863586282, - "acc_norm": 0.2282051282051282, - "acc_norm_stderr": 0.021278393863586282 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.045879047413018105, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.045879047413018105 - }, - 
"harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2660098522167488, - "acc_stderr": 0.031089826002937523, - "acc_norm": 0.2660098522167488, - "acc_norm_stderr": 0.031089826002937523 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042767, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042767 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.39316239316239315, - "acc_stderr": 0.03199957924651048, - "acc_norm": 0.39316239316239315, - "acc_norm_stderr": 0.03199957924651048 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432118, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432118 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.33636363636363636, - "acc_stderr": 0.04525393596302505, - "acc_norm": 0.33636363636363636, - "acc_norm_stderr": 0.04525393596302505 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.02564410863926763, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.02564410863926763 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2847682119205298, - "acc_stderr": 0.03684881521389024, - "acc_norm": 0.2847682119205298, - "acc_norm_stderr": 0.03684881521389024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3283582089552239, - "acc_stderr": 0.03320685889744324, - "acc_norm": 0.3283582089552239, - "acc_norm_stderr": 0.03320685889744324 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483099, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483099 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.022569897074918417, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.022569897074918417 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106133, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106133 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.17, - "acc_stderr": 0.0377525168068637, - "acc_norm": 0.17, - "acc_norm_stderr": 0.0377525168068637 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2861271676300578, - "acc_stderr": 0.02433214677913413, - "acc_norm": 0.2861271676300578, - "acc_norm_stderr": 0.02433214677913413 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.036429145782924055, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.036429145782924055 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.30864197530864196, - "acc_stderr": 0.025702640260603746, - "acc_norm": 0.30864197530864196, - "acc_norm_stderr": 0.025702640260603746 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565317, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565317 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.04372748290278007, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.04372748290278007 - }, - 
"harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29908256880733947, - "acc_stderr": 0.019630417285415175, - "acc_norm": 0.29908256880733947, - "acc_norm_stderr": 0.019630417285415175 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15079365079365079, - "acc_stderr": 0.03200686497287392, - "acc_norm": 0.15079365079365079, - "acc_norm_stderr": 0.03200686497287392 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3104575163398693, - "acc_stderr": 0.026493033225145898, - "acc_norm": 0.3104575163398693, - "acc_norm_stderr": 0.026493033225145898 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.4462809917355372, - "acc_stderr": 0.0453793517794788, - "acc_norm": 0.4462809917355372, - "acc_norm_stderr": 0.0453793517794788 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.27631578947368424, - "acc_stderr": 0.03639057569952925, - "acc_norm": 0.27631578947368424, - "acc_norm_stderr": 0.03639057569952925 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.29901960784313725, - "acc_stderr": 0.018521756215423024, - "acc_norm": 0.29901960784313725, - "acc_norm_stderr": 0.018521756215423024 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2765957446808511, - "acc_stderr": 0.026684564340460997, - "acc_norm": 0.2765957446808511, - "acc_norm_stderr": 0.026684564340460997 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3392857142857143, - "acc_stderr": 0.044939490686135404, - "acc_norm": 0.3392857142857143, - "acc_norm_stderr": 0.044939490686135404 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.028963702570791033, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.028963702570791033 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653696, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653696 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3014705882352941, - "acc_stderr": 0.027875982114273168, - "acc_norm": 0.3014705882352941, - "acc_norm_stderr": 0.027875982114273168 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2530612244897959, - "acc_stderr": 0.02783302387139968, - "acc_norm": 0.2530612244897959, - "acc_norm_stderr": 0.02783302387139968 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.03105239193758435, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.03105239193758435 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2757496740547588, - "acc_stderr": 0.011413813609160989, - "acc_norm": 0.2757496740547588, - "acc_norm_stderr": 0.011413813609160989 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3151515151515151, - "acc_stderr": 0.0362773057502241, - "acc_norm": 0.3151515151515151, - "acc_norm_stderr": 
0.0362773057502241 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.22276621787025705, - "mc1_stderr": 0.01456650696139675, - "mc2": 0.36506276866988424, - "mc2_stderr": 0.014809047702061968 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.357981220657277, - "acc_stderr": 0.016433845814829085, - "acc_norm": 0.4424882629107981, - "acc_norm_stderr": 0.01702601866298502 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nayohan/llama-2-ko-7b-Inst", - "model_sha": "be7272fa767e378e7a91bc94b59c8bcb726202fd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json b/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json deleted file mode 
100644 index 3027840370ef26870e349e2c02d35caf143a2b2a..0000000000000000000000000000000000000000 --- a/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2354948805460751, - "acc_stderr": 0.012399451855004746, - "acc_norm": 0.28924914675767915, - "acc_norm_stderr": 0.013250012579393443 - }, - "harness|ko_hellaswag|10": { - "acc": 0.335291774546903, - "acc_stderr": 0.004711275408138412, - "acc_norm": 0.4166500697072296, - "acc_norm_stderr": 0.004919962822208309 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21637426900584794, - "acc_stderr": 0.03158149539338731, - "acc_norm": 0.21637426900584794, - "acc_norm_stderr": 0.03158149539338731 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.39805825242718446, - "acc_stderr": 0.048467482539772386, - "acc_norm": 0.39805825242718446, - "acc_norm_stderr": 0.048467482539772386 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26181353767560667, - "acc_stderr": 0.015720838678445266, - "acc_norm": 0.26181353767560667, - "acc_norm_stderr": 0.015720838678445266 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.03673731683969506, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.03673731683969506 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.19574468085106383, - "acc_stderr": 0.025937853139977155, - "acc_norm": 0.19574468085106383, - "acc_norm_stderr": 0.025937853139977155 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.18674698795180722, - "acc_stderr": 0.030338749144500594, - "acc_norm": 0.18674698795180722, - "acc_norm_stderr": 0.030338749144500594 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.24663677130044842, - "acc_stderr": 0.028930413120910877, - "acc_norm": 0.24663677130044842, - "acc_norm_stderr": 0.028930413120910877 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.19083969465648856, - "acc_stderr": 0.03446513350752599, - "acc_norm": 0.19083969465648856, - "acc_norm_stderr": 0.03446513350752599 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124498, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124498 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.045766654032077615, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.045766654032077615 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.031041941304059288, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.031041941304059288 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35384615384615387, - "acc_stderr": 0.02424378399406217, - "acc_norm": 0.35384615384615387, - "acc_norm_stderr": 
0.02424378399406217 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368466, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368466 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252626, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252626 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.270935960591133, - "acc_stderr": 0.031270907132977, - "acc_norm": 0.270935960591133, - "acc_norm_stderr": 0.031270907132977 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553873, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553873 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.025819233256483727, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.025819233256483727 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.04172343038705383, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.04172343038705383 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.271523178807947, - "acc_stderr": 0.03631329803969654, - "acc_norm": 0.271523178807947, - "acc_norm_stderr": 0.03631329803969654 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22388059701492538, - "acc_stderr": 0.029475250236017193, - "acc_norm": 0.22388059701492538, - "acc_norm_stderr": 0.029475250236017193 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3179190751445087, - "acc_stderr": 0.0355068398916558, - "acc_norm": 0.3179190751445087, - "acc_norm_stderr": 0.0355068398916558 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.03745554791462457, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.03745554791462457 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.22, - "acc_stderr": 0.0416333199893227, - "acc_norm": 0.22, - "acc_norm_stderr": 0.0416333199893227 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.22832369942196531, - "acc_stderr": 0.022598703804321624, - "acc_norm": 0.22832369942196531, - "acc_norm_stderr": 0.022598703804321624 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.023788583551658526, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.023788583551658526 - }, - "harness|ko_mmlu_college_mathematics|5": { - 
"acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03947152782669415, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03947152782669415 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23853211009174313, - "acc_stderr": 0.018272575810231857, - "acc_norm": 0.23853211009174313, - "acc_norm_stderr": 0.018272575810231857 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924316, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23202614379084968, - "acc_stderr": 0.02417084087934101, - "acc_norm": 0.23202614379084968, - "acc_norm_stderr": 0.02417084087934101 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2644628099173554, - "acc_stderr": 0.04026187527591205, - "acc_norm": 0.2644628099173554, - "acc_norm_stderr": 0.04026187527591205 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03583496176361062, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03583496176361062 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21405228758169934, - "acc_stderr": 0.01659342966232903, - "acc_norm": 0.21405228758169934, - "acc_norm_stderr": 0.01659342966232903 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.29432624113475175, - "acc_stderr": 0.027187127011503803, - "acc_norm": 0.29432624113475175, - "acc_norm_stderr": 0.027187127011503803 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.40816326530612246, - "acc_stderr": 0.03146465712827423, - "acc_norm": 0.40816326530612246, - "acc_norm_stderr": 0.03146465712827423 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2320675105485232, - "acc_stderr": 0.02747974455080852, - "acc_norm": 0.2320675105485232, - "acc_norm_stderr": 0.02747974455080852 - }, - "harness|ko_mmlu_professional_law|5": { - 
"acc": 0.22685788787483702, - "acc_stderr": 0.010696348133569929, - "acc_norm": 0.22685788787483702, - "acc_norm_stderr": 0.010696348133569929 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.030964517926923393, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.030964517926923393 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.22424242424242424, - "acc_stderr": 0.032568666616811015, - "acc_norm": 0.22424242424242424, - "acc_norm_stderr": 0.032568666616811015 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23623011015911874, - "mc1_stderr": 0.014869755015871082, - "mc2": 0.4038819958960065, - "mc2_stderr": 0.014994809766039018 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.20187793427230047, - "acc_stderr": 0.013759869182275591, - "acc_norm": 0.3028169014084507, - "acc_norm_stderr": 0.015750657965844923 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - 
"harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nayohan/polyglot-ko-1.3b-Inst", - "model_sha": "639d14146b4085f23df93967d62b50b2ff9d4af7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json b/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json deleted file mode 100644 index 85923ab50adc2e711f049b805370a3a9c9bddfe5..0000000000000000000000000000000000000000 --- a/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2773037542662116, - "acc_stderr": 0.013082095839059374, - "acc_norm": 0.3191126279863481, - "acc_norm_stderr": 0.0136216961191733 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3692491535550687, - "acc_stderr": 0.004816152074023089, - "acc_norm": 0.47321250746863175, - "acc_norm_stderr": 0.00498261523305711 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.23391812865497075, - "acc_stderr": 0.03246721765117827, - "acc_norm": 0.23391812865497075, - "acc_norm_stderr": 0.03246721765117827 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.04802694698258974, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.04802694698258974 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.015745497169049057, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.015745497169049057 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03820169914517905, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03820169914517905 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3148936170212766, - "acc_stderr": 0.03036358219723816, - "acc_norm": 0.3148936170212766, - "acc_norm_stderr": 0.03036358219723816 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.036293353299478595, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.036293353299478595 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.27009646302250806, - "acc_stderr": 0.025218040373410616, - "acc_norm": 0.27009646302250806, - "acc_norm_stderr": 0.025218040373410616 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3183856502242152, - "acc_stderr": 0.03126580522513713, - "acc_norm": 0.3183856502242152, - "acc_norm_stderr": 0.03126580522513713 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22137404580152673, - "acc_stderr": 0.0364129708131373, - "acc_norm": 0.22137404580152673, - "acc_norm_stderr": 0.0364129708131373 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.21379310344827587, - "acc_stderr": 0.034165204477475494, - "acc_norm": 0.21379310344827587, - "acc_norm_stderr": 0.034165204477475494 - }, - 
"harness|ko_mmlu_college_physics|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.04755129616062947, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.04755129616062947 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121622, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121622 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3487179487179487, - "acc_stderr": 0.02416278028401772, - "acc_norm": 0.3487179487179487, - "acc_norm_stderr": 0.02416278028401772 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.23148148148148148, - "acc_stderr": 0.04077494709252626, - "acc_norm": 0.23148148148148148, - "acc_norm_stderr": 0.04077494709252626 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3225806451612903, - "acc_stderr": 0.026593084516572284, - "acc_norm": 0.3225806451612903, - "acc_norm_stderr": 0.026593084516572284 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.027008766090708087, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 0.027008766090708087 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2885572139303483, - "acc_stderr": 0.03203841040213321, - "acc_norm": 0.2885572139303483, - "acc_norm_stderr": 0.03203841040213321 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.30057803468208094, - "acc_stderr": 0.03496101481191181, - "acc_norm": 0.30057803468208094, - "acc_norm_stderr": 0.03496101481191181 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2328042328042328, - "acc_stderr": 0.02176596167215453, - "acc_norm": 0.2328042328042328, - "acc_norm_stderr": 0.02176596167215453 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.03514697467862388, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.03514697467862388 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_moral_disputes|5": { 
- "acc": 0.21098265895953758, - "acc_stderr": 0.021966309947043117, - "acc_norm": 0.21098265895953758, - "acc_norm_stderr": 0.021966309947043117 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.22699386503067484, - "acc_stderr": 0.03291099578615769, - "acc_norm": 0.22699386503067484, - "acc_norm_stderr": 0.03291099578615769 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2623456790123457, - "acc_stderr": 0.024477222856135107, - "acc_norm": 0.2623456790123457, - "acc_norm_stderr": 0.024477222856135107 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3316062176165803, - "acc_stderr": 0.03397636541089116, - "acc_norm": 0.3316062176165803, - "acc_norm_stderr": 0.03397636541089116 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3137614678899083, - "acc_stderr": 0.019894723341469127, - "acc_norm": 0.3137614678899083, - "acc_norm_stderr": 0.019894723341469127 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.03852273364924316, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.03852273364924316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.025553169991826514, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.025553169991826514 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3305785123966942, - "acc_stderr": 0.04294340845212095, - "acc_norm": 0.3305785123966942, - "acc_norm_stderr": 0.04294340845212095 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.25, - "acc_stderr": 0.03523807393012047, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03523807393012047 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.016819028375736386, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.016819028375736386 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432414, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.042878587513404565, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.042878587513404565 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4583333333333333, - "acc_stderr": 0.03398110890294636, - "acc_norm": 0.4583333333333333, - "acc_norm_stderr": 0.03398110890294636 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - 
"acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.23628691983122363, - "acc_stderr": 0.02765215314415926, - "acc_norm": 0.23628691983122363, - "acc_norm_stderr": 0.02765215314415926 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2607561929595828, - "acc_stderr": 0.011213471559602336, - "acc_norm": 0.2607561929595828, - "acc_norm_stderr": 0.011213471559602336 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.0331750593000918, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.0331750593000918 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23990208078335373, - "mc1_stderr": 0.014948812679062137, - "mc2": 0.40001430050776826, - "mc2_stderr": 0.014747441557861264 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2992957746478873, - "acc_stderr": 0.015698309276204934, - "acc_norm": 0.4084507042253521, - "acc_norm_stderr": 0.016850023674109628 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - 
"harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nayohan/polyglot-ko-5.8b-Inst-All", - "model_sha": "a1bb0d8ecb43625e03b527639fc4599763fa398e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst-v1.1/result_2023-10-22 05:08:33.json b/nayohan/polyglot-ko-5.8b-Inst-v1.1/result_2023-10-22 05:08:33.json deleted file mode 100644 index d1d1140c996c26a1bd3dc5e56b43e02230ef5984..0000000000000000000000000000000000000000 --- a/nayohan/polyglot-ko-5.8b-Inst-v1.1/result_2023-10-22 05:08:33.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2721843003412969, - "acc_stderr": 0.013006600406423707, - "acc_norm": 0.31399317406143346, - "acc_norm_stderr": 0.013562691224726286 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3724357697669787, - "acc_stderr": 0.004824655406075561, - "acc_norm": 0.47759410476000796, - "acc_norm_stderr": 0.004984768912326939 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727665, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727665 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.04802694698258973, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.04802694698258973 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20434227330779056, - "acc_stderr": 0.014419123980931904, - "acc_norm": 0.20434227330779056, - "acc_norm_stderr": 0.014419123980931904 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838746, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838746 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.031417842916639245, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.031417842916639245 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24115755627009647, - "acc_stderr": 0.024296594034763426, - "acc_norm": 0.24115755627009647, - "acc_norm_stderr": 0.024296594034763426 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.10762331838565023, - "acc_stderr": 0.020799400082880004, - "acc_norm": 0.10762331838565023, - "acc_norm_stderr": 0.020799400082880004 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - 
"acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.35353535353535354, - "acc_stderr": 0.03406086723547153, - "acc_norm": 0.35353535353535354, - "acc_norm_stderr": 0.03406086723547153 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.37254901960784315, - "acc_stderr": 0.048108401480826346, - "acc_norm": 0.37254901960784315, - "acc_norm_stderr": 0.048108401480826346 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3641025641025641, - "acc_stderr": 0.024396672985094778, - "acc_norm": 0.3641025641025641, - "acc_norm_stderr": 0.024396672985094778 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3161290322580645, - "acc_stderr": 0.026450874489042764, - "acc_norm": 0.3161290322580645, - "acc_norm_stderr": 0.026450874489042764 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2981132075471698, - "acc_stderr": 0.028152837942493864, - "acc_norm": 0.2981132075471698, - "acc_norm_stderr": 0.028152837942493864 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.33112582781456956, - "acc_stderr": 0.038425817186598696, - "acc_norm": 0.33112582781456956, - "acc_norm_stderr": 0.038425817186598696 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2671957671957672, - "acc_stderr": 0.02278967314577656, - "acc_norm": 0.2671957671957672, - "acc_norm_stderr": 0.02278967314577656 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 
0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757173, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757173 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3486238532110092, - "acc_stderr": 0.020431254090714328, - "acc_norm": 0.3486238532110092, - "acc_norm_stderr": 0.020431254090714328 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.026090162504279053, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.026090162504279053 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.14049586776859505, - "acc_stderr": 0.031722334260021585, - "acc_norm": 0.14049586776859505, - "acc_norm_stderr": 0.031722334260021585 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3355263157894737, - "acc_stderr": 0.038424985593952694, - "acc_norm": 0.3355263157894737, - "acc_norm_stderr": 0.038424985593952694 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148598, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148598 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24113475177304963, - "acc_stderr": 0.02551873104953776, - "acc_norm": 0.24113475177304963, - "acc_norm_stderr": 0.02551873104953776 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.16071428571428573, - "acc_stderr": 0.034859460964757394, - "acc_norm": 0.16071428571428573, - "acc_norm_stderr": 0.034859460964757394 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 
0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036624 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20253164556962025, - "acc_stderr": 0.026160568246601464, - "acc_norm": 0.20253164556962025, - "acc_norm_stderr": 0.026160568246601464 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24315514993481094, - "acc_stderr": 0.010956556654417358, - "acc_norm": 0.24315514993481094, - "acc_norm_stderr": 0.010956556654417358 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23623011015911874, - "mc1_stderr": 0.014869755015871088, - "mc2": 0.39381819679905655, - "mc2_stderr": 0.014740320370934685 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.22417840375586853, - "acc_stderr": 0.01429595059251123, - "acc_norm": 0.31690140845070425, - "acc_norm_stderr": 0.015949203508790564 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - 
"harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nayohan/polyglot-ko-5.8b-Inst-v1.1", - "model_sha": "3b281fef4e86eff3db4d4cce287b5941abae9ae5", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json b/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json deleted file mode 100644 index d8c611e57a2acdc608239e059b3dd6901183c25e..0000000000000000000000000000000000000000 --- a/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27474402730375425, - "acc_stderr": 0.013044617212771227, - "acc_norm": 0.3191126279863481, - "acc_norm_stderr": 0.013621696119173307 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37950607448715395, - "acc_stderr": 0.004842723234022034, - "acc_norm": 0.4827723561043617, - "acc_norm_stderr": 0.004986818680313436 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.15789473684210525, - "acc_stderr": 0.027966785859160893, - "acc_norm": 0.15789473684210525, - "acc_norm_stderr": 0.027966785859160893 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.30097087378640774, - "acc_stderr": 0.04541609446503948, - "acc_norm": 0.30097087378640774, - "acc_norm_stderr": 0.04541609446503948 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.21966794380587484, - "acc_stderr": 0.014805384478371162, - "acc_norm": 0.21966794380587484, - "acc_norm_stderr": 0.014805384478371162 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.22962962962962963, - "acc_stderr": 0.036333844140734636, - "acc_norm": 0.22962962962962963, - "acc_norm_stderr": 0.036333844140734636 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.14042553191489363, - "acc_stderr": 0.022712077616627864, - "acc_norm": 0.14042553191489363, - "acc_norm_stderr": 0.022712077616627864 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.24096385542168675, - "acc_stderr": 0.03329394119073528, - "acc_norm": 0.24096385542168675, - "acc_norm_stderr": 0.03329394119073528 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26688102893890675, - "acc_stderr": 0.025122637608816646, - 
"acc_norm": 0.26688102893890675, - "acc_norm_stderr": 0.025122637608816646 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.14349775784753363, - "acc_stderr": 0.02352937126961819, - "acc_norm": 0.14349775784753363, - "acc_norm_stderr": 0.02352937126961819 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252606, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252606 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3434343434343434, - "acc_stderr": 0.03383201223244442, - "acc_norm": 0.3434343434343434, - "acc_norm_stderr": 0.03383201223244442 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.04617034827006718, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.04617034827006718 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121622, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121622 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35128205128205126, - "acc_stderr": 0.024203665177902796, - "acc_norm": 0.35128205128205126, - "acc_norm_stderr": 0.024203665177902796 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.030108330718011625, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.030108330718011625 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2967741935483871, - "acc_stderr": 0.0259885007924119, - "acc_norm": 0.2967741935483871, - "acc_norm_stderr": 0.0259885007924119 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27547169811320754, - "acc_stderr": 0.027495663683724067, - "acc_norm": 0.27547169811320754, - "acc_norm_stderr": 0.027495663683724067 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721377, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721377 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.030769444967296018, - "acc_norm": 
0.2537313432835821, - "acc_norm_stderr": 0.030769444967296018 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.035839017547364134, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.035839017547364134 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23544973544973544, - "acc_stderr": 0.02185150982203172, - "acc_norm": 0.23544973544973544, - "acc_norm_stderr": 0.02185150982203172 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566016, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566016 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.22254335260115607, - "acc_stderr": 0.02239421566194282, - "acc_norm": 0.22254335260115607, - "acc_norm_stderr": 0.02239421566194282 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2392638036809816, - "acc_stderr": 0.0335195387952127, - "acc_norm": 0.2392638036809816, - "acc_norm_stderr": 0.0335195387952127 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2345679012345679, - "acc_stderr": 0.023576881744005723, - "acc_norm": 0.2345679012345679, - "acc_norm_stderr": 0.023576881744005723 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.36787564766839376, - "acc_stderr": 0.034801756684660366, - "acc_norm": 0.36787564766839376, - "acc_norm_stderr": 0.034801756684660366 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489361, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489361 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3229357798165138, - "acc_stderr": 0.020048115923415318, - "acc_norm": 0.3229357798165138, - "acc_norm_stderr": 0.020048115923415318 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.040061680838488774, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.040061680838488774 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.025738854797818733, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.025738854797818733 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.03896878985070417, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.03896878985070417 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23026315789473684, - "acc_stderr": 0.03426059424403165, - "acc_norm": 0.23026315789473684, - "acc_norm_stderr": 0.03426059424403165 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.016819028375736386, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.016819028375736386 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22340425531914893, - "acc_stderr": 0.02484792135806396, - "acc_norm": 0.22340425531914893, - "acc_norm_stderr": 
0.02484792135806396 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.24107142857142858, - "acc_stderr": 0.04059867246952687, - "acc_norm": 0.24107142857142858, - "acc_norm_stderr": 0.04059867246952687 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4675925925925926, - "acc_stderr": 0.03402801581358966, - "acc_norm": 0.4675925925925926, - "acc_norm_stderr": 0.03402801581358966 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.030161911930767102, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.030161911930767102 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.4, - "acc_stderr": 0.03136250240935892, - "acc_norm": 0.4, - "acc_norm_stderr": 0.03136250240935892 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.20675105485232068, - "acc_stderr": 0.026361651668389094, - "acc_norm": 0.20675105485232068, - "acc_norm_stderr": 0.026361651668389094 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23663624511082137, - "acc_stderr": 0.010855137351572746, - "acc_norm": 0.23663624511082137, - "acc_norm_stderr": 0.010855137351572746 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24242424242424243, - "acc_stderr": 0.03346409881055953, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.03346409881055953 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.014974827279752329, - "mc2": 0.40162480294038216, - "mc2_stderr": 0.015030387645461886 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2312206572769953, - "acc_stderr": 0.014452713321605411, - "acc_norm": 0.2992957746478873, - "acc_norm_stderr": 0.015698309276204945 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - 
"harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nayohan/polyglot-ko-5.8b-Inst", - "model_sha": "19809698b5cc1da68966cdcb3b06daceff10a901", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json b/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json deleted file mode 100644 index 04547d6ecfc32cc3af6e7fdb0111310d081141ae..0000000000000000000000000000000000000000 --- a/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2790102389078498, - "acc_stderr": 0.013106784883601346, - "acc_norm": 0.32764505119453924, - "acc_norm_stderr": 0.013715847940719344 - }, - "harness|ko_hellaswag|10": { - "acc": 0.386476797450707, - "acc_stderr": 0.004859467984155259, - "acc_norm": 0.4987054371639116, - "acc_norm_stderr": 0.00498976468673883 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824565, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824565 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24776500638569604, - "acc_stderr": 0.015438083080568961, - "acc_norm": 0.24776500638569604, - "acc_norm_stderr": 0.015438083080568961 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.18518518518518517, - "acc_stderr": 0.03355677216313141, - "acc_norm": 0.18518518518518517, - "acc_norm_stderr": 0.03355677216313141 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - 
"acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28936170212765955, - "acc_stderr": 0.02964400657700962, - "acc_norm": 0.28936170212765955, - "acc_norm_stderr": 0.02964400657700962 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 0.03384429155233136, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 0.03384429155233136 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2958199356913183, - "acc_stderr": 0.025922371788818784, - "acc_norm": 0.2958199356913183, - "acc_norm_stderr": 0.025922371788818784 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21076233183856502, - "acc_stderr": 0.027373095500540193, - "acc_norm": 0.21076233183856502, - "acc_norm_stderr": 0.027373095500540193 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2824427480916031, - "acc_stderr": 0.03948406125768361, - "acc_norm": 0.2824427480916031, - "acc_norm_stderr": 0.03948406125768361 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 0.030954055470365907, - "acc_norm": 0.25252525252525254, - "acc_norm_stderr": 0.030954055470365907 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727772, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727772 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04690650298201942, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04690650298201942 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.25210084033613445, - "acc_stderr": 0.028205545033277726, - "acc_norm": 0.25210084033613445, - "acc_norm_stderr": 0.028205545033277726 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.02242127361292371, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.02242127361292371 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.042365112580946336, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.042365112580946336 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.02967833314144444, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.02967833314144444 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.1967741935483871, - "acc_stderr": 0.02261640942074203, - "acc_norm": 0.1967741935483871, - "acc_norm_stderr": 0.02261640942074203 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.28205128205128205, - "acc_stderr": 0.02948036054954119, - "acc_norm": 0.28205128205128205, - "acc_norm_stderr": 0.02948036054954119 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 0.026880647889051982, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889051982 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721375, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 
0.04069306319721375 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.23703703703703705, - "acc_stderr": 0.025928876132766104, - "acc_norm": 0.23703703703703705, - "acc_norm_stderr": 0.025928876132766104 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21890547263681592, - "acc_stderr": 0.029239174636647, - "acc_norm": 0.21890547263681592, - "acc_norm_stderr": 0.029239174636647 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483098, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483098 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.02264421261552521, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.02264421261552521 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.2, - "acc_stderr": 0.040201512610368445, - "acc_norm": 0.2, - "acc_norm_stderr": 0.040201512610368445 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.023176298203992005, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.023176298203992005 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.036803503712864616, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.036803503712864616 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25617283950617287, - "acc_stderr": 0.024288533637726095, - "acc_norm": 0.25617283950617287, - "acc_norm_stderr": 0.024288533637726095 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29533678756476683, - "acc_stderr": 0.03292296639155139, - "acc_norm": 0.29533678756476683, - "acc_norm_stderr": 0.03292296639155139 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.04142439719489362, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.04142439719489362 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25137614678899084, - "acc_stderr": 0.018599206360287415, - "acc_norm": 0.25137614678899084, - "acc_norm_stderr": 0.018599206360287415 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.036196045241242515, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.036196045241242515 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.024288619466046102, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.024288619466046102 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.18181818181818182, - "acc_stderr": 0.035208939510976534, - "acc_norm": 0.18181818181818182, - "acc_norm_stderr": 0.035208939510976534 - }, - 
"harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.017401816711427657, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.017401816711427657 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23049645390070922, - "acc_stderr": 0.025123739226872405, - "acc_norm": 0.23049645390070922, - "acc_norm_stderr": 0.025123739226872405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.33035714285714285, - "acc_stderr": 0.04464285714285713, - "acc_norm": 0.33035714285714285, - "acc_norm_stderr": 0.04464285714285713 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.03350991604696043, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.03350991604696043 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23687150837988827, - "acc_stderr": 0.014219570788103987, - "acc_norm": 0.23687150837988827, - "acc_norm_stderr": 0.014219570788103987 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.1875, - "acc_stderr": 0.023709788253811766, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.023709788253811766 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3142857142857143, - "acc_stderr": 0.02971932942241748, - "acc_norm": 0.3142857142857143, - "acc_norm_stderr": 0.02971932942241748 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25316455696202533, - "acc_stderr": 0.028304657943035293, - "acc_norm": 0.25316455696202533, - "acc_norm_stderr": 0.028304657943035293 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2522816166883963, - "acc_stderr": 0.011092789056875232, - "acc_norm": 0.2522816166883963, - "acc_norm_stderr": 0.011092789056875232 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.03077855467869326, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.03077855467869326 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.28484848484848485, - "acc_stderr": 0.03524390844511783, - "acc_norm": 0.28484848484848485, - "acc_norm_stderr": 0.03524390844511783 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.24112607099143207, - "mc1_stderr": 0.01497482727975233, - "mc2": 0.39040412705496613, - "mc2_stderr": 0.01471780652709213 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5950704225352113, - "acc_stderr": 0.016827095223977993, - "acc_norm": 0.67018779342723, - "acc_norm_stderr": 0.016116355523395683 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - 
"harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nlpai-lab/kullm-polyglot-12.8b-v2", - "model_sha": "9e0c9be881f663ca088b10faad15b54ea3ba779c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json b/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json deleted file mode 100644 index 67a0a4e0368446ccc46165348b0f60a865f114c8..0000000000000000000000000000000000000000 --- a/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2841296928327645, - "acc_stderr": 0.013179442447653887, - "acc_norm": 0.3293515358361775, - "acc_norm_stderr": 0.013734057652635474 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3694483170683131, - "acc_stderr": 0.004816690123209753, - "acc_norm": 0.47301334395538736, - "acc_norm_stderr": 0.004982508198584259 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.17543859649122806, - "acc_stderr": 0.029170885500727654, - "acc_norm": 0.17543859649122806, - "acc_norm_stderr": 0.029170885500727654 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.36893203883495146, - "acc_stderr": 0.04777615181156739, - "acc_norm": 
0.36893203883495146, - "acc_norm_stderr": 0.04777615181156739 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24648786717752236, - "acc_stderr": 0.015411308769686936, - "acc_norm": 0.24648786717752236, - "acc_norm_stderr": 0.015411308769686936 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.31851851851851853, - "acc_stderr": 0.040247784019771096, - "acc_norm": 0.31851851851851853, - "acc_norm_stderr": 0.040247784019771096 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2425531914893617, - "acc_stderr": 0.028020226271200214, - "acc_norm": 0.2425531914893617, - "acc_norm_stderr": 0.028020226271200214 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.16265060240963855, - "acc_stderr": 0.02873023789261379, - "acc_norm": 0.16265060240963855, - "acc_norm_stderr": 0.02873023789261379 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2733118971061093, - "acc_stderr": 0.02531176597542612, - "acc_norm": 0.2733118971061093, - "acc_norm_stderr": 0.02531176597542612 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21524663677130046, - "acc_stderr": 0.027584066602208256, - "acc_norm": 0.21524663677130046, - "acc_norm_stderr": 0.027584066602208256 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3383838383838384, - "acc_stderr": 0.033711241426263014, - "acc_norm": 0.3383838383838384, - "acc_norm_stderr": 0.033711241426263014 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.037245636197746325, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.037245636197746325 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.030956636328566548, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.030956636328566548 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.32051282051282054, - "acc_stderr": 0.023661296393964273, - "acc_norm": 0.32051282051282054, - "acc_norm_stderr": 0.023661296393964273 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536975, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536975 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.14, - "acc_stderr": 0.03487350880197773, - "acc_norm": 0.14, - "acc_norm_stderr": 0.03487350880197773 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24516129032258063, - "acc_stderr": 0.02447224384089553, - "acc_norm": 0.24516129032258063, - "acc_norm_stderr": 0.02447224384089553 - }, - 
"harness|ko_mmlu_marketing|5": { - "acc": 0.23504273504273504, - "acc_stderr": 0.027778835904935434, - "acc_norm": 0.23504273504273504, - "acc_norm_stderr": 0.027778835904935434 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.29056603773584905, - "acc_stderr": 0.02794321998933715, - "acc_norm": 0.29056603773584905, - "acc_norm_stderr": 0.02794321998933715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.0449429086625209, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.0449429086625209 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945277, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945277 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2885572139303483, - "acc_stderr": 0.03203841040213321, - "acc_norm": 0.2885572139303483, - "acc_norm_stderr": 0.03203841040213321 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.03583901754736412, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.03583901754736412 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.21957671957671956, - "acc_stderr": 0.021320018599770355, - "acc_norm": 0.21957671957671956, - "acc_norm_stderr": 0.021320018599770355 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.037738099906869334, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.037738099906869334 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23410404624277456, - "acc_stderr": 0.022797110278071145, - "acc_norm": 0.23410404624277456, - "acc_norm_stderr": 0.022797110278071145 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.02378858355165852, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.02378858355165852 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.34196891191709844, - "acc_stderr": 0.03423465100104284, - "acc_norm": 0.34196891191709844, - "acc_norm_stderr": 0.03423465100104284 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.0414243971948936, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.0414243971948936 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.28807339449541286, - "acc_stderr": 0.019416445892636018, - "acc_norm": 0.28807339449541286, - "acc_norm_stderr": 0.019416445892636018 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.36507936507936506, - "acc_stderr": 0.04306241259127153, - "acc_norm": 0.36507936507936506, - "acc_norm_stderr": 0.04306241259127153 - }, - 
"harness|ko_mmlu_nutrition|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.025553169991826528, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.025553169991826528 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2644628099173554, - "acc_stderr": 0.04026187527591207, - "acc_norm": 0.2644628099173554, - "acc_norm_stderr": 0.04026187527591207 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.0378272898086547, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.0378272898086547 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22058823529411764, - "acc_stderr": 0.016774672365468514, - "acc_norm": 0.22058823529411764, - "acc_norm_stderr": 0.016774672365468514 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.25177304964539005, - "acc_stderr": 0.025892151156709405, - "acc_norm": 0.25177304964539005, - "acc_norm_stderr": 0.025892151156709405 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419073, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419073 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.03236585252602158, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.03236585252602158 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036625, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036625 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4117647058823529, - "acc_stderr": 0.02989616303312547, - "acc_norm": 0.4117647058823529, - "acc_norm_stderr": 0.02989616303312547 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3551020408163265, - "acc_stderr": 0.03063565515038764, - "acc_norm": 0.3551020408163265, - "acc_norm_stderr": 0.03063565515038764 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.23628691983122363, - "acc_stderr": 0.027652153144159256, - "acc_norm": 0.23628691983122363, - "acc_norm_stderr": 0.027652153144159256 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.23728813559322035, - "acc_stderr": 0.010865436690780267, - "acc_norm": 0.23728813559322035, - "acc_norm_stderr": 0.010865436690780267 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.030964517926923403, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.030964517926923403 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139405, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139405 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26193390452876375, - "mc1_stderr": 0.01539211880501501, - "mc2": 0.42389862375590953, - "mc2_stderr": 0.015026306992823544 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.25938967136150237, - "acc_stderr": 0.015024721881926207, - "acc_norm": 0.3274647887323944, - "acc_norm_stderr": 0.01608699911080736 - } - }, - 
"versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "nlpai-lab/kullm-polyglot-5.8b-v2", - "model_sha": "5981236c4fd4e624eca2326312d40419e6441256", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json b/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json deleted file mode 100644 index 573a381a0141f7439de9cd36f2aaa7fc4e11319a..0000000000000000000000000000000000000000 --- a/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.35921501706484643, - "acc_stderr": 0.014020224155839162, - "acc_norm": 0.4052901023890785, - 
"acc_norm_stderr": 0.014346869060229325 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36496713802031466, - "acc_stderr": 0.004804370563856225, - "acc_norm": 0.4689304919338777, - "acc_norm_stderr": 0.004980138679161039 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.52046783625731, - "acc_stderr": 0.0383161053282193, - "acc_norm": 0.52046783625731, - "acc_norm_stderr": 0.0383161053282193 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458934, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458934 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.49936143039591313, - "acc_stderr": 0.01787994891443166, - "acc_norm": 0.49936143039591313, - "acc_norm_stderr": 0.01787994891443166 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34814814814814815, - "acc_stderr": 0.041153246103369526, - "acc_norm": 0.34814814814814815, - "acc_norm_stderr": 0.041153246103369526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.39574468085106385, - "acc_stderr": 0.03196758697835362, - "acc_norm": 0.39574468085106385, - "acc_norm_stderr": 0.03196758697835362 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.4036144578313253, - "acc_stderr": 0.03819486140758397, - "acc_norm": 0.4036144578313253, - "acc_norm_stderr": 0.03819486140758397 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.42765273311897106, - "acc_stderr": 0.028099240775809563, - "acc_norm": 0.42765273311897106, - "acc_norm_stderr": 0.028099240775809563 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.47085201793721976, - "acc_stderr": 0.03350073248773404, - "acc_norm": 0.47085201793721976, - "acc_norm_stderr": 0.03350073248773404 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.43, - "acc_stderr": 0.049756985195624284, - "acc_norm": 0.43, - "acc_norm_stderr": 0.049756985195624284 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5656565656565656, - "acc_stderr": 0.03531505879359182, - "acc_norm": 0.5656565656565656, - "acc_norm_stderr": 0.03531505879359182 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.42758620689655175, - "acc_stderr": 0.04122737111370331, - "acc_norm": 0.42758620689655175, - "acc_norm_stderr": 0.04122737111370331 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.17647058823529413, - "acc_stderr": 0.0379328118530781, - "acc_norm": 0.17647058823529413, - "acc_norm_stderr": 0.0379328118530781 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4579831932773109, - "acc_stderr": 0.03236361111951941, - "acc_norm": 0.4579831932773109, - "acc_norm_stderr": 0.03236361111951941 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4461538461538462, - "acc_stderr": 0.025203571773028333, - "acc_norm": 0.4461538461538462, - "acc_norm_stderr": 0.025203571773028333 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.47, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.47, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621505, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 
0.5, - "acc_stderr": 0.04833682445228318, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04833682445228318 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.33004926108374383, - "acc_stderr": 0.03308530426228258, - "acc_norm": 0.33004926108374383, - "acc_norm_stderr": 0.03308530426228258 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.47419354838709676, - "acc_stderr": 0.028406095057653326, - "acc_norm": 0.47419354838709676, - "acc_norm_stderr": 0.028406095057653326 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6324786324786325, - "acc_stderr": 0.031585391577456365, - "acc_norm": 0.6324786324786325, - "acc_norm_stderr": 0.031585391577456365 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4830188679245283, - "acc_stderr": 0.030755120364119905, - "acc_norm": 0.4830188679245283, - "acc_norm_stderr": 0.030755120364119905 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5, - "acc_stderr": 0.04789131426105757, - "acc_norm": 0.5, - "acc_norm_stderr": 0.04789131426105757 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.02773896963217609, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.02773896963217609 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31788079470198677, - "acc_stderr": 0.038020397601079024, - "acc_norm": 0.31788079470198677, - "acc_norm_stderr": 0.038020397601079024 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5621890547263682, - "acc_stderr": 0.0350808011219984, - "acc_norm": 0.5621890547263682, - "acc_norm_stderr": 0.0350808011219984 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.37572254335260113, - "acc_stderr": 0.036928207672648664, - "acc_norm": 0.37572254335260113, - "acc_norm_stderr": 0.036928207672648664 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3306878306878307, - "acc_stderr": 0.024229965298425082, - "acc_norm": 0.3306878306878307, - "acc_norm_stderr": 0.024229965298425082 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3680555555555556, - "acc_stderr": 0.040329990539607195, - "acc_norm": 0.3680555555555556, - "acc_norm_stderr": 0.040329990539607195 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.59, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.59, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5086705202312138, - "acc_stderr": 0.0269150473553698, - "acc_norm": 0.5086705202312138, - "acc_norm_stderr": 0.0269150473553698 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.03889066619112723, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.03889066619112723 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4506172839506173, - "acc_stderr": 0.027684721415656203, - "acc_norm": 0.4506172839506173, - "acc_norm_stderr": 0.027684721415656203 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001974, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001974 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5181347150259067, - "acc_stderr": 0.036060650018329185, - "acc_norm": 0.5181347150259067, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.20175438596491227, - "acc_stderr": 0.037752050135836386, - "acc_norm": 
0.20175438596491227, - "acc_norm_stderr": 0.037752050135836386 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.47706422018348627, - "acc_stderr": 0.0214147570581755, - "acc_norm": 0.47706422018348627, - "acc_norm_stderr": 0.0214147570581755 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.042163702135578345, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.042163702135578345 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.02843109544417664, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.02843109544417664 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.39, - "acc_stderr": 0.04902071300001975, - "acc_norm": 0.39, - "acc_norm_stderr": 0.04902071300001975 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6363636363636364, - "acc_stderr": 0.043913262867240704, - "acc_norm": 0.6363636363636364, - "acc_norm_stderr": 0.043913262867240704 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3684210526315789, - "acc_stderr": 0.03925523381052932, - "acc_norm": 0.3684210526315789, - "acc_norm_stderr": 0.03925523381052932 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3480392156862745, - "acc_stderr": 0.019270998708223977, - "acc_norm": 0.3480392156862745, - "acc_norm_stderr": 0.019270998708223977 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35106382978723405, - "acc_stderr": 0.02847350127296376, - "acc_norm": 0.35106382978723405, - "acc_norm_stderr": 0.02847350127296376 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.27314814814814814, - "acc_stderr": 0.03038805130167812, - "acc_norm": 0.27314814814814814, - "acc_norm_stderr": 0.03038805130167812 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.3229050279329609, - "acc_stderr": 0.01563844038024149, - "acc_norm": 0.3229050279329609, - "acc_norm_stderr": 0.01563844038024149 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.048523658709391, - "acc_norm": 0.37, - "acc_norm_stderr": 0.048523658709391 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.45, - "acc_stderr": 0.05, - "acc_norm": 0.45, - "acc_norm_stderr": 0.05 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3492647058823529, - "acc_stderr": 0.02895975519682486, - "acc_norm": 0.3492647058823529, - "acc_norm_stderr": 0.02895975519682486 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5265306122448979, - "acc_stderr": 0.03196412734523272, - "acc_norm": 0.5265306122448979, - "acc_norm_stderr": 0.03196412734523272 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.569620253164557, - "acc_stderr": 0.03223017195937599, - "acc_norm": 0.569620253164557, - "acc_norm_stderr": 0.03223017195937599 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32529335071707954, - "acc_stderr": 0.011965311536571531, - "acc_norm": 0.32529335071707954, - "acc_norm_stderr": 0.011965311536571531 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4803921568627451, - "acc_stderr": 0.03506612560524866, - "acc_norm": 0.4803921568627451, - "acc_norm_stderr": 0.03506612560524866 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.48484848484848486, - "acc_stderr": 0.03902551007374449, - "acc_norm": 0.48484848484848486, - 
"acc_norm_stderr": 0.03902551007374449 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.35128518971848227, - "mc1_stderr": 0.0167113581635444, - "mc2": 0.5184394133098864, - "mc2_stderr": 0.01600771387375644 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.24178403755868544, - "acc_stderr": 0.014677277126731796, - "acc_norm": 0.24647887323943662, - "acc_norm_stderr": 0.014773139084466518 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "oh-yeontaek/llama-2-13B-LoRA-assemble", - "model_sha": "85bb49d333dba4a08b051418663d16853ce30cee", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json b/quantumaikr/KoreanLM-1.5b/result_2023-10-18 
16:24:34.json deleted file mode 100644 index 3ff156ba8119fa1cf9124f25a89cbe016bed9347..0000000000000000000000000000000000000000 --- a/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21245733788395904, - "acc_stderr": 0.011953482906582949, - "acc_norm": 0.2781569965870307, - "acc_norm_stderr": 0.0130944699195388 - }, - "harness|ko_hellaswag|10": { - "acc": 0.26000796654052977, - "acc_stderr": 0.004377421493297836, - "acc_norm": 0.2647878908583947, - "acc_norm_stderr": 0.004403184691341697 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.03401052620104089, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.03401052620104089 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.32038834951456313, - "acc_stderr": 0.0462028408228004, - "acc_norm": 0.32038834951456313, - "acc_norm_stderr": 0.0462028408228004 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.22988505747126436, - "acc_stderr": 0.015046301846691838, - "acc_norm": 0.22988505747126436, - "acc_norm_stderr": 0.015046301846691838 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.03944624162501116, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.03944624162501116 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.19574468085106383, - "acc_stderr": 0.025937853139977148, - "acc_norm": 0.19574468085106383, - "acc_norm_stderr": 0.025937853139977148 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.26506024096385544, - "acc_stderr": 0.03436024037944966, - "acc_norm": 0.26506024096385544, - "acc_norm_stderr": 0.03436024037944966 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2990353697749196, - "acc_stderr": 0.02600330111788514, - "acc_norm": 0.2990353697749196, - "acc_norm_stderr": 0.02600330111788514 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.32286995515695066, - "acc_stderr": 0.03138147637575498, - "acc_norm": 0.32286995515695066, - "acc_norm_stderr": 0.03138147637575498 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.32061068702290074, - "acc_stderr": 0.04093329229834278, - "acc_norm": 0.32061068702290074, - "acc_norm_stderr": 0.04093329229834278 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768079, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768079 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23232323232323232, - "acc_stderr": 0.030088629490217483, - "acc_norm": 0.23232323232323232, - "acc_norm_stderr": 0.030088629490217483 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727772, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727772 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.39215686274509803, - "acc_stderr": 0.048580835742663434, - "acc_norm": 0.39215686274509803, - "acc_norm_stderr": 0.048580835742663434 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.029597329730978082, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.029597329730978082 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.29743589743589743, - "acc_stderr": 0.023177408131465942, - "acc_norm": 0.29743589743589743, - 
"acc_norm_stderr": 0.023177408131465942 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.16, - "acc_stderr": 0.0368452949177471, - "acc_norm": 0.16, - "acc_norm_stderr": 0.0368452949177471 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.0395783547198098, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.0395783547198098 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.031618563353586114, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.031618563353586114 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2870967741935484, - "acc_stderr": 0.02573654274559452, - "acc_norm": 0.2870967741935484, - "acc_norm_stderr": 0.02573654274559452 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.02704685763071668, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.02704685763071668 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.30943396226415093, - "acc_stderr": 0.028450154794118627, - "acc_norm": 0.30943396226415093, - "acc_norm_stderr": 0.028450154794118627 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.17272727272727273, - "acc_stderr": 0.0362069183392922, - "acc_norm": 0.17272727272727273, - "acc_norm_stderr": 0.0362069183392922 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2074074074074074, - "acc_stderr": 0.024720713193952165, - "acc_norm": 0.2074074074074074, - "acc_norm_stderr": 0.024720713193952165 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.03511807571804726, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.03511807571804726 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.31840796019900497, - "acc_stderr": 0.03294118479054095, - "acc_norm": 0.31840796019900497, - "acc_norm_stderr": 0.03294118479054095 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2774566473988439, - "acc_stderr": 0.03414014007044036, - "acc_norm": 0.2774566473988439, - "acc_norm_stderr": 0.03414014007044036 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23809523809523808, - "acc_stderr": 0.021935878081184763, - "acc_norm": 0.23809523809523808, - "acc_norm_stderr": 0.021935878081184763 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.16, - "acc_stderr": 0.03684529491774708, - "acc_norm": 0.16, - "acc_norm_stderr": 0.03684529491774708 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.02289408248992599, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.02289408248992599 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.20679012345679013, - "acc_stderr": 0.022535006705942818, - "acc_norm": 0.20679012345679013, - "acc_norm_stderr": 0.022535006705942818 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 
0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.0314102478056532, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.0314102478056532 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281337, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281337 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26238532110091745, - "acc_stderr": 0.018861885021534734, - "acc_norm": 0.26238532110091745, - "acc_norm_stderr": 0.018861885021534734 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.25396825396825395, - "acc_stderr": 0.03893259610604674, - "acc_norm": 0.25396825396825395, - "acc_norm_stderr": 0.03893259610604674 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.26143790849673204, - "acc_stderr": 0.025160998214292456, - "acc_norm": 0.26143790849673204, - "acc_norm_stderr": 0.025160998214292456 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.03896878985070415, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.03896878985070415 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.26973684210526316, - "acc_stderr": 0.03611780560284898, - "acc_norm": 0.26973684210526316, - "acc_norm_stderr": 0.03611780560284898 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22875816993464052, - "acc_stderr": 0.016992723465466233, - "acc_norm": 0.22875816993464052, - "acc_norm_stderr": 0.016992723465466233 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.02646903681859063, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.02646903681859063 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.038946411200447915, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.038946411200447915 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.030546745264953185, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.030546745264953185 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2569832402234637, - "acc_stderr": 0.014614465821966339, - "acc_norm": 0.2569832402234637, - "acc_norm_stderr": 0.014614465821966339 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.1801470588235294, - "acc_stderr": 0.02334516361654486, - "acc_norm": 0.1801470588235294, - "acc_norm_stderr": 0.02334516361654486 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249783, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249783 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24050632911392406, - "acc_stderr": 0.027820781981149678, - "acc_norm": 0.24050632911392406, - "acc_norm_stderr": 0.027820781981149678 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 
0.23728813559322035, - "acc_stderr": 0.010865436690780269, - "acc_norm": 0.23728813559322035, - "acc_norm_stderr": 0.010865436690780269 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.02998373305591361, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.02998373305591361 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26438188494492043, - "mc1_stderr": 0.015438211119522517, - "mc2": 0.5207557813698324, - "mc2_stderr": 0.01656184952031738 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2007042253521127, - "acc_stderr": 0.013729895644328095, - "acc_norm": 0.534037558685446, - "acc_norm_stderr": 0.017100018368429525 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - 
"harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "quantumaikr/KoreanLM-1.5b", - "model_sha": "d26b261612f7cf8358309921bc387b754596355f", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json b/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json deleted file mode 100644 index cc6f7311f5324de47bb3919587548e4574348135..0000000000000000000000000000000000000000 --- a/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19880546075085323, - "acc_stderr": 0.011662850198175536, - "acc_norm": 0.24488054607508533, - "acc_norm_stderr": 0.012566273985131356 - }, - "harness|ko_hellaswag|10": { - "acc": 0.27106154152559253, - "acc_stderr": 0.004435993492583864, - "acc_norm": 0.27753435570603463, - "acc_norm_stderr": 0.004468672138910928 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824563, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824563 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24265644955300128, - "acc_stderr": 0.015329888940899873, - "acc_norm": 0.24265644955300128, - "acc_norm_stderr": 0.015329888940899873 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.0391545063041425, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.0391545063041425 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2723404255319149, - "acc_stderr": 0.029101290698386694, - "acc_norm": 0.2723404255319149, - "acc_norm_stderr": 0.029101290698386694 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370518, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370518 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.18971061093247588, - "acc_stderr": 0.022268196258783218, - "acc_norm": 0.18971061093247588, - "acc_norm_stderr": 0.022268196258783218 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.31390134529147984, - "acc_stderr": 0.031146796482972465, - "acc_norm": 0.31390134529147984, - "acc_norm_stderr": 0.031146796482972465 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2595419847328244, - "acc_stderr": 0.03844876139785271, - "acc_norm": 0.2595419847328244, - "acc_norm_stderr": 0.03844876139785271 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.18181818181818182, - "acc_stderr": 0.0274796030105388, - "acc_norm": 0.18181818181818182, - "acc_norm_stderr": 0.0274796030105388 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.036001056927277716, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.036001056927277716 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 
0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3487394957983193, - "acc_stderr": 0.03095663632856655, - "acc_norm": 0.3487394957983193, - "acc_norm_stderr": 0.03095663632856655 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.33076923076923076, - "acc_stderr": 0.023854795680971142, - "acc_norm": 0.33076923076923076, - "acc_norm_stderr": 0.023854795680971142 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.04236511258094633, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.04236511258094633 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358611, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358611 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.23870967741935484, - "acc_stderr": 0.024251071262208834, - "acc_norm": 0.23870967741935484, - "acc_norm_stderr": 0.024251071262208834 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2606837606837607, - "acc_stderr": 0.028760348956523414, - "acc_norm": 0.2606837606837607, - "acc_norm_stderr": 0.028760348956523414 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.20754716981132076, - "acc_stderr": 0.024959918028911274, - "acc_norm": 0.20754716981132076, - "acc_norm_stderr": 0.024959918028911274 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2185430463576159, - "acc_stderr": 0.03374235550425694, - "acc_norm": 0.2185430463576159, - "acc_norm_stderr": 0.03374235550425694 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409217, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409217 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.20809248554913296, - "acc_stderr": 0.030952890217749884, - "acc_norm": 0.20809248554913296, - "acc_norm_stderr": 0.030952890217749884 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.022644212615525218, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.022644212615525218 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2361111111111111, - "acc_stderr": 0.03551446610810826, - "acc_norm": 0.2361111111111111, - "acc_norm_stderr": 0.03551446610810826 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.046482319871173156, - "acc_norm": 0.31, - "acc_norm_stderr": 0.046482319871173156 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - 
"acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2085889570552147, - "acc_stderr": 0.03192193448934725, - "acc_norm": 0.2085889570552147, - "acc_norm_stderr": 0.03192193448934725 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.20987654320987653, - "acc_stderr": 0.02265834408598137, - "acc_norm": 0.20987654320987653, - "acc_norm_stderr": 0.02265834408598137 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.033248379397581594, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.033248379397581594 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2719298245614035, - "acc_stderr": 0.04185774424022056, - "acc_norm": 0.2719298245614035, - "acc_norm_stderr": 0.04185774424022056 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.1926605504587156, - "acc_stderr": 0.016909276884936097, - "acc_norm": 0.1926605504587156, - "acc_norm_stderr": 0.016909276884936097 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03718489006818115, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03718489006818115 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.21241830065359477, - "acc_stderr": 0.023420375478296125, - "acc_norm": 0.21241830065359477, - "acc_norm_stderr": 0.023420375478296125 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384739, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384739 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.03941897526516303, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.03941897526516303 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.01755581809132227, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.01755581809132227 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2624113475177305, - "acc_stderr": 0.026244920349843003, - "acc_norm": 0.2624113475177305, - "acc_norm_stderr": 0.026244920349843003 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.30357142857142855, - "acc_stderr": 0.04364226155841044, - "acc_norm": 0.30357142857142855, - "acc_norm_stderr": 0.04364226155841044 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4305555555555556, - "acc_stderr": 0.03376922151252336, - "acc_norm": 0.4305555555555556, - "acc_norm_stderr": 0.03376922151252336 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23798882681564246, - "acc_stderr": 0.014242630070574892, - "acc_norm": 0.23798882681564246, - "acc_norm_stderr": 0.014242630070574892 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.43014705882352944, - "acc_stderr": 0.030074971917302875, - "acc_norm": 
0.43014705882352944, - "acc_norm_stderr": 0.030074971917302875 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22448979591836735, - "acc_stderr": 0.02671143055553841, - "acc_norm": 0.22448979591836735, - "acc_norm_stderr": 0.02671143055553841 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.24050632911392406, - "acc_stderr": 0.027820781981149675, - "acc_norm": 0.24050632911392406, - "acc_norm_stderr": 0.027820781981149675 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2457627118644068, - "acc_stderr": 0.010996156635142692, - "acc_norm": 0.2457627118644068, - "acc_norm_stderr": 0.010996156635142692 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26438188494492043, - "mc1_stderr": 0.015438211119522519, - "mc2": 0.4822371041865183, - "mc2_stderr": 0.01604938696224229 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.24178403755868544, - "acc_stderr": 0.0146772771267318, - "acc_norm": 0.4835680751173709, - "acc_norm_stderr": 0.017130520993936013 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - 
"harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "quantumaikr/KoreanLM-3B", - "model_sha": "f49217779eea253aa3e7dd4645eedfd496fa9e0b", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json b/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json deleted file mode 100644 index 0a3dafcf94f8936b216b0767eadd7fd52cb974dc..0000000000000000000000000000000000000000 --- a/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2619453924914676, - "acc_stderr": 0.012849054826858117, - "acc_norm": 0.30119453924914674, - "acc_norm_stderr": 0.01340674176784762 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3179645488946425, - "acc_stderr": 0.004647338877642185, - "acc_norm": 0.3739294961163115, - "acc_norm_stderr": 0.004828564090620291 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.27485380116959063, - "acc_stderr": 0.034240429246915824, - "acc_norm": 0.27485380116959063, - "acc_norm_stderr": 0.034240429246915824 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.04582124160161551, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.04582124160161551 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2988505747126437, - "acc_stderr": 0.016369256815093127, - "acc_norm": 0.2988505747126437, - "acc_norm_stderr": 0.016369256815093127 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.03629335329947859, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.03629335329947859 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2604501607717042, - "acc_stderr": 0.02492672322484555, - "acc_norm": 0.2604501607717042, - "acc_norm_stderr": 0.02492672322484555 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.336322869955157, - "acc_stderr": 0.031708824268455, - "acc_norm": 0.336322869955157, - "acc_norm_stderr": 0.031708824268455 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.29770992366412213, - "acc_stderr": 0.04010358942462203, - "acc_norm": 0.29770992366412213, - "acc_norm_stderr": 0.04010358942462203 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - 
"harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.03173071239071724, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.03173071239071724 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2827586206896552, - "acc_stderr": 0.037528339580033376, - "acc_norm": 0.2827586206896552, - "acc_norm_stderr": 0.037528339580033376 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.1568627450980392, - "acc_stderr": 0.03618664819936244, - "acc_norm": 0.1568627450980392, - "acc_norm_stderr": 0.03618664819936244 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.28991596638655465, - "acc_stderr": 0.029472485833136084, - "acc_norm": 0.28991596638655465, - "acc_norm_stderr": 0.029472485833136084 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.23333333333333334, - "acc_stderr": 0.02144454730156048, - "acc_norm": 0.23333333333333334, - "acc_norm_stderr": 0.02144454730156048 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3425925925925926, - "acc_stderr": 0.04587904741301811, - "acc_norm": 0.3425925925925926, - "acc_norm_stderr": 0.04587904741301811 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2512315270935961, - "acc_stderr": 0.030516530732694436, - "acc_norm": 0.2512315270935961, - "acc_norm_stderr": 0.030516530732694436 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.26129032258064516, - "acc_stderr": 0.024993053397764815, - "acc_norm": 0.26129032258064516, - "acc_norm_stderr": 0.024993053397764815 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.4188034188034188, - "acc_stderr": 0.03232128912157792, - "acc_norm": 0.4188034188034188, - "acc_norm_stderr": 0.03232128912157792 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.02761116340239972, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.02761116340239972 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.32727272727272727, - "acc_stderr": 0.04494290866252088, - "acc_norm": 0.32727272727272727, - "acc_norm_stderr": 0.04494290866252088 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02671924078371215, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02671924078371215 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23178807947019867, - "acc_stderr": 0.034454062719870546, - "acc_norm": 0.23178807947019867, - "acc_norm_stderr": 0.034454062719870546 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.030360490154014635, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.030360490154014635 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483099, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483099 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2830687830687831, - "acc_stderr": 0.023201392938194974, - "acc_norm": 0.2830687830687831, - "acc_norm_stderr": 0.023201392938194974 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566017, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 
0.03716177437566017 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.42, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.42, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.28034682080924855, - "acc_stderr": 0.024182427496577622, - "acc_norm": 0.28034682080924855, - "acc_norm_stderr": 0.024182427496577622 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3067484662576687, - "acc_stderr": 0.036230899157241474, - "acc_norm": 0.3067484662576687, - "acc_norm_stderr": 0.036230899157241474 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.27469135802469136, - "acc_stderr": 0.02483605786829468, - "acc_norm": 0.27469135802469136, - "acc_norm_stderr": 0.02483605786829468 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.22279792746113988, - "acc_stderr": 0.03003114797764154, - "acc_norm": 0.22279792746113988, - "acc_norm_stderr": 0.03003114797764154 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.23302752293577983, - "acc_stderr": 0.018125669180861493, - "acc_norm": 0.23302752293577983, - "acc_norm_stderr": 0.018125669180861493 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2698412698412698, - "acc_stderr": 0.03970158273235172, - "acc_norm": 0.2698412698412698, - "acc_norm_stderr": 0.03970158273235172 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2973856209150327, - "acc_stderr": 0.02617390850671858, - "acc_norm": 0.2973856209150327, - "acc_norm_stderr": 0.02617390850671858 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.35537190082644626, - "acc_stderr": 0.04369236326573981, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573981 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.033176727875331574, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.033176727875331574 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.018185218954318082, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.018185218954318082 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2978723404255319, - "acc_stderr": 0.027281608344469414, - "acc_norm": 0.2978723404255319, - "acc_norm_stderr": 0.027281608344469414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.16203703703703703, - "acc_stderr": 0.025130453652268455, - "acc_norm": 0.16203703703703703, - "acc_norm_stderr": 0.025130453652268455 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - 
"harness|ko_mmlu_college_computer_science|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036625, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036625 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322716, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322716 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.024562204314142314, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.024562204314142314 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.16326530612244897, - "acc_stderr": 0.023661699177098604, - "acc_norm": 0.16326530612244897, - "acc_norm_stderr": 0.023661699177098604 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.350210970464135, - "acc_stderr": 0.031052391937584353, - "acc_norm": 0.350210970464135, - "acc_norm_stderr": 0.031052391937584353 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.26140808344198174, - "acc_stderr": 0.011222528169771316, - "acc_norm": 0.26140808344198174, - "acc_norm_stderr": 0.011222528169771316 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.029771775228145628, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.029771775228145628 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2909090909090909, - "acc_stderr": 0.03546563019624336, - "acc_norm": 0.2909090909090909, - "acc_norm_stderr": 0.03546563019624336 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26193390452876375, - "mc1_stderr": 0.015392118805015016, - "mc2": 0.42260296070190784, - "mc2_stderr": 0.015435227733476522 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1068075117370892, - "acc_stderr": 0.0105878712050749, - "acc_norm": 0.1619718309859155, - "acc_norm_stderr": 0.012629445242618125 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "quantumaikr/KoreanLM", - "model_sha": "f4351abcdd6a933afbaffad0badf60c273e71920", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json b/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json deleted file mode 100644 index 199e9875d47a47794b4be6d6a65f760880e5d2ae..0000000000000000000000000000000000000000 --- a/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3174061433447099, - "acc_stderr": 0.01360223908803817, - "acc_norm": 0.38054607508532423, - "acc_norm_stderr": 0.014188277712349814 - }, - "harness|ko_hellaswag|10": { - "acc": 0.38309101772555265, - "acc_stderr": 0.004851466623601449, - "acc_norm": 0.49571798446524595, - "acc_norm_stderr": 0.004989598426249537 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.03377310252209194, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.03377310252209194 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2815533980582524, - "acc_stderr": 0.044532548363264673, - "acc_norm": 0.2815533980582524, - "acc_norm_stderr": 0.044532548363264673 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.3371647509578544, - "acc_stderr": 0.016905207420803554, - "acc_norm": 0.3371647509578544, - "acc_norm_stderr": 0.016905207420803554 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04072314811876837, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04072314811876837 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3021276595744681, - "acc_stderr": 0.030017554471880557, - "acc_norm": 0.3021276595744681, - "acc_norm_stderr": 0.030017554471880557 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3373493975903614, - "acc_stderr": 0.03680783690727581, - "acc_norm": 0.3373493975903614, - "acc_norm_stderr": 0.03680783690727581 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.0266644108869376, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 
0.0266644108869376 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3094170403587444, - "acc_stderr": 0.031024411740572196, - "acc_norm": 0.3094170403587444, - "acc_norm_stderr": 0.031024411740572196 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.40458015267175573, - "acc_stderr": 0.043046937953806645, - "acc_norm": 0.40458015267175573, - "acc_norm_stderr": 0.043046937953806645 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.40404040404040403, - "acc_stderr": 0.03496130972056128, - "acc_norm": 0.40404040404040403, - "acc_norm_stderr": 0.03496130972056128 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3103448275862069, - "acc_stderr": 0.03855289616378949, - "acc_norm": 0.3103448275862069, - "acc_norm_stderr": 0.03855289616378949 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.19607843137254902, - "acc_stderr": 0.03950581861179962, - "acc_norm": 0.19607843137254902, - "acc_norm_stderr": 0.03950581861179962 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31932773109243695, - "acc_stderr": 0.030283995525884396, - "acc_norm": 0.31932773109243695, - "acc_norm_stderr": 0.030283995525884396 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2717948717948718, - "acc_stderr": 0.02255655101013235, - "acc_norm": 0.2717948717948718, - "acc_norm_stderr": 0.02255655101013235 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.044531975073749834, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.044531975073749834 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.03161856335358609, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.03161856335358609 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3032258064516129, - "acc_stderr": 0.026148685930671746, - "acc_norm": 0.3032258064516129, - "acc_norm_stderr": 0.026148685930671746 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.36752136752136755, - "acc_stderr": 0.031585391577456365, - "acc_norm": 0.36752136752136755, - "acc_norm_stderr": 0.031585391577456365 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.32452830188679244, - "acc_stderr": 0.028815615713432115, - "acc_norm": 0.32452830188679244, - "acc_norm_stderr": 0.028815615713432115 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.35454545454545455, - "acc_stderr": 0.04582004841505416, - "acc_norm": 0.35454545454545455, - "acc_norm_stderr": 0.04582004841505416 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.026719240783712156, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.026719240783712156 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.304635761589404, - "acc_stderr": 0.03757949922943342, - "acc_norm": 0.304635761589404, - "acc_norm_stderr": 0.03757949922943342 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3383084577114428, - "acc_stderr": 0.03345563070339192, - "acc_norm": 0.3383084577114428, - "acc_norm_stderr": 
0.03345563070339192 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2152777777777778, - "acc_stderr": 0.03437079344106134, - "acc_norm": 0.2152777777777778, - "acc_norm_stderr": 0.03437079344106134 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.32947976878612717, - "acc_stderr": 0.0253052581318797, - "acc_norm": 0.32947976878612717, - "acc_norm_stderr": 0.0253052581318797 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.26380368098159507, - "acc_stderr": 0.03462419931615624, - "acc_norm": 0.26380368098159507, - "acc_norm_stderr": 0.03462419931615624 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3271604938271605, - "acc_stderr": 0.026105673861409825, - "acc_norm": 0.3271604938271605, - "acc_norm_stderr": 0.026105673861409825 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.27461139896373055, - "acc_stderr": 0.03221024508041153, - "acc_norm": 0.27461139896373055, - "acc_norm_stderr": 0.03221024508041153 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436716, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436716 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3798165137614679, - "acc_stderr": 0.020808825617866244, - "acc_norm": 0.3798165137614679, - "acc_norm_stderr": 0.020808825617866244 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.20634920634920634, - "acc_stderr": 0.03619604524124249, - "acc_norm": 0.20634920634920634, - "acc_norm_stderr": 0.03619604524124249 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3660130718954248, - "acc_stderr": 0.027582811415159614, - "acc_norm": 0.3660130718954248, - "acc_norm_stderr": 0.027582811415159614 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3884297520661157, - "acc_stderr": 0.04449270350068382, - "acc_norm": 0.3884297520661157, - "acc_norm_stderr": 0.04449270350068382 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3618421052631579, - "acc_stderr": 0.03910525752849724, - "acc_norm": 0.3618421052631579, - "acc_norm_stderr": 0.03910525752849724 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.018120224251484577, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.018120224251484577 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - 
"harness|ko_mmlu_machine_learning|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04287858751340456, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04287858751340456 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.032568505702936484, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.032568505702936484 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24022346368715083, - "acc_stderr": 0.014288343803925295, - "acc_norm": 0.24022346368715083, - "acc_norm_stderr": 0.014288343803925295 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816507, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816507 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3897058823529412, - "acc_stderr": 0.02962466358115969, - "acc_norm": 0.3897058823529412, - "acc_norm_stderr": 0.02962466358115969 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2938775510204082, - "acc_stderr": 0.029162738410249772, - "acc_norm": 0.2938775510204082, - "acc_norm_stderr": 0.029162738410249772 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35864978902953587, - "acc_stderr": 0.031219569445301833, - "acc_norm": 0.35864978902953587, - "acc_norm_stderr": 0.031219569445301833 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2900912646675359, - "acc_stderr": 0.011590375554733096, - "acc_norm": 0.2900912646675359, - "acc_norm_stderr": 0.011590375554733096 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2696078431372549, - "acc_stderr": 0.03114557065948678, - "acc_norm": 0.2696078431372549, - "acc_norm_stderr": 0.03114557065948678 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.30303030303030304, - "acc_stderr": 0.035886248000917075, - "acc_norm": 0.30303030303030304, - "acc_norm_stderr": 0.035886248000917075 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.23133414932680538, - "mc1_stderr": 0.014761945174862677, - "mc2": 0.3706017104903605, - "mc2_stderr": 0.014735026291520032 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.31220657276995306, - "acc_stderr": 0.015884928030374876, - "acc_norm": 0.4953051643192488, - "acc_norm_stderr": 0.017139023665847613 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - 
"harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "shangrilar/llama2-ko-7b-kullm-base", - "model_sha": "b7db1fa5f45f178d4e98ac52ece14064ded1b7c0", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/siryuon/KOEN-13B/result_2023-10-06 14:34:04.json b/siryuon/KOEN-13B/result_2023-10-06 14:34:04.json deleted file mode 100644 index 386c1ab9f9493f92592b233d4170992ecd0e3dce..0000000000000000000000000000000000000000 --- a/siryuon/KOEN-13B/result_2023-10-06 14:34:04.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3660409556313993, - "acc_stderr": 0.014077223108470139, - "acc_norm": 0.4180887372013652, - "acc_norm_stderr": 0.014413988396996083 - }, - "harness|ko_hellaswag|10": { - "acc": 0.403505277833101, - "acc_stderr": 0.0048959776766252395, - "acc_norm": 0.536247759410476, - "acc_norm_stderr": 0.0049766519897576356 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47953216374269003, - "acc_stderr": 0.0383161053282193, - "acc_norm": 0.47953216374269003, - "acc_norm_stderr": 0.0383161053282193 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3883495145631068, - "acc_stderr": 0.0482572933735639, - "acc_norm": 0.3883495145631068, - "acc_norm_stderr": 0.0482572933735639 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.4904214559386973, - "acc_stderr": 0.017876682275340873, - "acc_norm": 0.4904214559386973, - "acc_norm_stderr": 0.017876682275340873 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.37037037037037035, - "acc_stderr": 0.04171654161354543, - "acc_norm": 0.37037037037037035, - "acc_norm_stderr": 0.04171654161354543 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421255, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421255 - }, - 
"harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33617021276595743, - "acc_stderr": 0.030881618520676942, - "acc_norm": 0.33617021276595743, - "acc_norm_stderr": 0.030881618520676942 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.41566265060240964, - "acc_stderr": 0.03836722176598053, - "acc_norm": 0.41566265060240964, - "acc_norm_stderr": 0.03836722176598053 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.44694533762057875, - "acc_stderr": 0.028237769422085328, - "acc_norm": 0.44694533762057875, - "acc_norm_stderr": 0.028237769422085328 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3632286995515695, - "acc_stderr": 0.03227790442850499, - "acc_norm": 0.3632286995515695, - "acc_norm_stderr": 0.03227790442850499 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.48091603053435117, - "acc_stderr": 0.04382094705550988, - "acc_norm": 0.48091603053435117, - "acc_norm_stderr": 0.04382094705550988 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.494949494949495, - "acc_stderr": 0.035621707606254015, - "acc_norm": 0.494949494949495, - "acc_norm_stderr": 0.035621707606254015 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.38620689655172413, - "acc_stderr": 0.04057324734419035, - "acc_norm": 0.38620689655172413, - "acc_norm_stderr": 0.04057324734419035 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3865546218487395, - "acc_stderr": 0.03163145807552379, - "acc_norm": 0.3865546218487395, - "acc_norm_stderr": 0.03163145807552379 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.36153846153846153, - "acc_stderr": 0.02435958146539696, - "acc_norm": 0.36153846153846153, - "acc_norm_stderr": 0.02435958146539696 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.44, - "acc_stderr": 0.049888765156985884, - "acc_norm": 0.44, - "acc_norm_stderr": 0.049888765156985884 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.04691521224077742, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.04691521224077742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3399014778325123, - "acc_stderr": 0.033327690684107895, - "acc_norm": 0.3399014778325123, - "acc_norm_stderr": 0.033327690684107895 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.42258064516129035, - "acc_stderr": 0.02810096472427264, - "acc_norm": 0.42258064516129035, - "acc_norm_stderr": 0.02810096472427264 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.594017094017094, - "acc_stderr": 0.03217180182641086, - "acc_norm": 0.594017094017094, - "acc_norm_stderr": 0.03217180182641086 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4188679245283019, - "acc_stderr": 0.030365050829115208, - "acc_norm": 0.4188679245283019, - "acc_norm_stderr": 0.030365050829115208 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.38181818181818183, - "acc_stderr": 0.046534298079135075, - "acc_norm": 0.38181818181818183, - "acc_norm_stderr": 0.046534298079135075 - }, - 
"harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360385, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360385 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4626865671641791, - "acc_stderr": 0.03525675167467974, - "acc_norm": 0.4626865671641791, - "acc_norm_stderr": 0.03525675167467974 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.035149425512674366, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.035149425512674366 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.30423280423280424, - "acc_stderr": 0.023695415009463087, - "acc_norm": 0.30423280423280424, - "acc_norm_stderr": 0.023695415009463087 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.51, - "acc_stderr": 0.05024183937956913, - "acc_norm": 0.51, - "acc_norm_stderr": 0.05024183937956913 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.43641618497109824, - "acc_stderr": 0.026700545424943684, - "acc_norm": 0.43641618497109824, - "acc_norm_stderr": 0.026700545424943684 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3496932515337423, - "acc_stderr": 0.03746668325470021, - "acc_norm": 0.3496932515337423, - "acc_norm_stderr": 0.03746668325470021 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.02760791408740046, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.02760791408740046 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.37305699481865284, - "acc_stderr": 0.034902055920485744, - "acc_norm": 0.37305699481865284, - "acc_norm_stderr": 0.034902055920485744 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.042270544512321984, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.042270544512321984 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.42568807339449544, - "acc_stderr": 0.021199235972470802, - "acc_norm": 0.42568807339449544, - "acc_norm_stderr": 0.021199235972470802 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208839, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208839 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3954248366013072, - "acc_stderr": 0.02799672318063145, - "acc_norm": 0.3954248366013072, - "acc_norm_stderr": 0.02799672318063145 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5454545454545454, - "acc_stderr": 0.045454545454545484, - "acc_norm": 0.5454545454545454, - "acc_norm_stderr": 0.045454545454545484 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 
0.45394736842105265, - "acc_stderr": 0.04051646342874141, - "acc_norm": 0.45394736842105265, - "acc_norm_stderr": 0.04051646342874141 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3055555555555556, - "acc_stderr": 0.018635594034423983, - "acc_norm": 0.3055555555555556, - "acc_norm_stderr": 0.018635594034423983 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3191489361702128, - "acc_stderr": 0.027807990141320214, - "acc_norm": 0.3191489361702128, - "acc_norm_stderr": 0.027807990141320214 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.042466243366976256, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.042466243366976256 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.03214952147802749, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.03214952147802749 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2424581005586592, - "acc_stderr": 0.01433352205921789, - "acc_norm": 0.2424581005586592, - "acc_norm_stderr": 0.01433352205921789 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.04824181513244218, - "acc_norm": 0.36, - "acc_norm_stderr": 0.04824181513244218 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.027257202606114948, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.027257202606114948 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3346938775510204, - "acc_stderr": 0.030209235226242307, - "acc_norm": 0.3346938775510204, - "acc_norm_stderr": 0.030209235226242307 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.48523206751054854, - "acc_stderr": 0.032533028078777386, - "acc_norm": 0.48523206751054854, - "acc_norm_stderr": 0.032533028078777386 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.28552803129074317, - "acc_stderr": 0.011535751586665657, - "acc_norm": 0.28552803129074317, - "acc_norm_stderr": 0.011535751586665657 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4019607843137255, - "acc_stderr": 0.034411900234824655, - "acc_norm": 0.4019607843137255, - "acc_norm_stderr": 0.034411900234824655 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4121212121212121, - "acc_stderr": 0.03843566993588717, - "acc_norm": 0.4121212121212121, - "acc_norm_stderr": 0.03843566993588717 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.26560587515299877, - "mc1_stderr": 0.015461027627253602, - "mc2": 0.4382289478351752, - "mc2_stderr": 0.014859555204764835 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.5434272300469484, - "acc_stderr": 0.017075008217544083, - "acc_norm": 0.6244131455399061, - "acc_norm_stderr": 0.01660070111698995 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 
1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "siryuon/KOEN-13B", - "model_sha": "c18bdc67d61099d74c7b77ea46714795082fc698", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/siryuon/polyglot-ko-12.8b-sryn/result_2023-10-06 07:26:45.json b/siryuon/polyglot-ko-12.8b-sryn/result_2023-10-06 07:26:45.json deleted file mode 100644 index b4039e1d4d5b8ec3a97d878a7fd4a86a91222a50..0000000000000000000000000000000000000000 --- a/siryuon/polyglot-ko-12.8b-sryn/result_2023-10-06 07:26:45.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2883959044368601, - "acc_stderr": 0.013238394422428164, - "acc_norm": 0.3515358361774744, - "acc_norm_stderr": 0.013952413699600933 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3881696873132842, - "acc_stderr": 0.004863375698153872, - "acc_norm": 0.4946225851424019, - "acc_norm_stderr": 0.004989492828168542 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.26900584795321636, - "acc_stderr": 0.0340105262010409, - "acc_norm": 0.26900584795321636, - "acc_norm_stderr": 0.0340105262010409 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - 
"harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24521072796934865, - "acc_stderr": 0.01538435228454394, - "acc_norm": 0.24521072796934865, - "acc_norm_stderr": 0.01538435228454394 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.03785714465066653, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.03785714465066653 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.20851063829787234, - "acc_stderr": 0.026556982117838707, - "acc_norm": 0.20851063829787234, - "acc_norm_stderr": 0.026556982117838707 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 0.03384429155233135, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 0.03384429155233135 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2861736334405145, - "acc_stderr": 0.025670259242188933, - "acc_norm": 0.2861736334405145, - "acc_norm_stderr": 0.025670259242188933 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.16143497757847533, - "acc_stderr": 0.02469395789912846, - "acc_norm": 0.16143497757847533, - "acc_norm_stderr": 0.02469395789912846 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.04793724854411022, - "acc_norm": 0.35, - "acc_norm_stderr": 0.04793724854411022 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.30808080808080807, - "acc_stderr": 0.03289477330098615, - "acc_norm": 0.30808080808080807, - "acc_norm_stderr": 0.03289477330098615 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.25517241379310346, - "acc_stderr": 0.03632984052707842, - "acc_norm": 0.25517241379310346, - "acc_norm_stderr": 0.03632984052707842 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2773109243697479, - "acc_stderr": 0.02907937453948001, - "acc_norm": 0.2773109243697479, - "acc_norm_stderr": 0.02907937453948001 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26153846153846155, - "acc_stderr": 0.02228214120420442, - "acc_norm": 0.26153846153846155, - "acc_norm_stderr": 0.02228214120420442 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.22167487684729065, - "acc_stderr": 0.029225575892489593, - "acc_norm": 0.22167487684729065, - "acc_norm_stderr": 0.029225575892489593 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.23870967741935484, - "acc_stderr": 0.024251071262208834, - "acc_norm": 0.23870967741935484, - "acc_norm_stderr": 0.024251071262208834 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 
0.3034188034188034, - "acc_stderr": 0.030118210106942652, - "acc_norm": 0.3034188034188034, - "acc_norm_stderr": 0.030118210106942652 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.27169811320754716, - "acc_stderr": 0.027377706624670713, - "acc_norm": 0.27169811320754716, - "acc_norm_stderr": 0.027377706624670713 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.041723430387053825, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.041723430387053825 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.027634907264178544, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.027634907264178544 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.036030385453603826, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.036030385453603826 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.1791044776119403, - "acc_stderr": 0.027113286753111837, - "acc_norm": 0.1791044776119403, - "acc_norm_stderr": 0.027113286753111837 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3352601156069364, - "acc_stderr": 0.03599586301247078, - "acc_norm": 0.3352601156069364, - "acc_norm_stderr": 0.03599586301247078 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.02141168439369418, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.02141168439369418 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.037455547914624576, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.037455547914624576 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.0230836585869842, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.0230836585869842 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.29012345679012347, - "acc_stderr": 0.02525117393649502, - "acc_norm": 0.29012345679012347, - "acc_norm_stderr": 0.02525117393649502 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909284, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909284 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.29533678756476683, - "acc_stderr": 0.03292296639155141, - "acc_norm": 0.29533678756476683, - "acc_norm_stderr": 0.03292296639155141 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2894736842105263, - "acc_stderr": 0.04266339443159394, - "acc_norm": 0.2894736842105263, - "acc_norm_stderr": 0.04266339443159394 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3137614678899083, - "acc_stderr": 0.019894723341469127, - "acc_norm": 0.3137614678899083, - "acc_norm_stderr": 0.019894723341469127 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.035670166752768635, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.035670166752768635 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 
0.27450980392156865, - "acc_stderr": 0.025553169991826524, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.025553169991826524 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.256198347107438, - "acc_stderr": 0.03984979653302872, - "acc_norm": 0.256198347107438, - "acc_norm_stderr": 0.03984979653302872 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23026315789473684, - "acc_stderr": 0.03426059424403165, - "acc_norm": 0.23026315789473684, - "acc_norm_stderr": 0.03426059424403165 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23366013071895425, - "acc_stderr": 0.017119158496044506, - "acc_norm": 0.23366013071895425, - "acc_norm_stderr": 0.017119158496044506 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432407, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432407 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.29464285714285715, - "acc_stderr": 0.04327040932578728, - "acc_norm": 0.29464285714285715, - "acc_norm_stderr": 0.04327040932578728 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095278, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095278 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.39705882352941174, - "acc_stderr": 0.02972215209928006, - "acc_norm": 0.39705882352941174, - "acc_norm_stderr": 0.02972215209928006 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2693877551020408, - "acc_stderr": 0.02840125202902294, - "acc_norm": 0.2693877551020408, - "acc_norm_stderr": 0.02840125202902294 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25738396624472576, - "acc_stderr": 0.028458820991460288, - "acc_norm": 0.25738396624472576, - "acc_norm_stderr": 0.028458820991460288 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25097783572359844, - "acc_stderr": 0.01107373029918721, - "acc_norm": 0.25097783572359844, - "acc_norm_stderr": 0.01107373029918721 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23039215686274508, - "acc_stderr": 0.02955429260569506, - "acc_norm": 0.23039215686274508, - "acc_norm_stderr": 0.02955429260569506 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.18787878787878787, - "acc_stderr": 0.03050193405942914, - "acc_norm": 0.18787878787878787, - "acc_norm_stderr": 0.03050193405942914 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.27539779681762544, - "mc1_stderr": 0.015638135667775523, - "mc2": 0.42952087211843815, - "mc2_stderr": 0.015198599249746652 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3779342723004695, - "acc_stderr": 0.016621166340849272, - "acc_norm": 0.43896713615023475, - "acc_norm_stderr": 0.017011608310486037 - } - }, - "versions": { - "all": 0, - 
"harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "siryuon/polyglot-ko-12.8b-sryn", - "model_sha": "5bc6d25dbc83bb1e2bcc656141316ae2cb079aff", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json b/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json deleted file mode 100644 index 7694adb65a0104ce5ac9aca329546b435927fef2..0000000000000000000000000000000000000000 --- a/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21331058020477817, - "acc_stderr": 0.011970971742326334, - "acc_norm": 0.2687713310580205, - "acc_norm_stderr": 0.012955065963710686 - }, - 
"harness|ko_hellaswag|10": { - "acc": 0.3132842063333997, - "acc_stderr": 0.004628809258483527, - "acc_norm": 0.3736307508464449, - "acc_norm_stderr": 0.004827786289074844 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2046783625730994, - "acc_stderr": 0.03094445977853321, - "acc_norm": 0.2046783625730994, - "acc_norm_stderr": 0.03094445977853321 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2524271844660194, - "acc_stderr": 0.04301250399690877, - "acc_norm": 0.2524271844660194, - "acc_norm_stderr": 0.04301250399690877 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2669220945083014, - "acc_stderr": 0.01581845089477755, - "acc_norm": 0.2669220945083014, - "acc_norm_stderr": 0.01581845089477755 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03591444084196969, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03591444084196969 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.027678452578212373, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.027678452578212373 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.3192771084337349, - "acc_stderr": 0.03629335329947859, - "acc_norm": 0.3192771084337349, - "acc_norm_stderr": 0.03629335329947859 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2282958199356913, - "acc_stderr": 0.023839303311398195, - "acc_norm": 0.2282958199356913, - "acc_norm_stderr": 0.023839303311398195 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.20179372197309417, - "acc_stderr": 0.02693611191280227, - "acc_norm": 0.20179372197309417, - "acc_norm_stderr": 0.02693611191280227 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25757575757575757, - "acc_stderr": 0.03115626951964684, - "acc_norm": 0.25757575757575757, - "acc_norm_stderr": 0.03115626951964684 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03565998174135302, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03565998174135302 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.030684737115135356, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.030684737115135356 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3230769230769231, - "acc_stderr": 0.02371088850197057, - "acc_norm": 0.3230769230769231, - "acc_norm_stderr": 0.02371088850197057 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.038612291966536955, - "acc_norm": 0.18, - "acc_norm_stderr": 0.038612291966536955 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 
0.04133119440243838, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243838 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3054187192118227, - "acc_stderr": 0.03240661565868408, - "acc_norm": 0.3054187192118227, - "acc_norm_stderr": 0.03240661565868408 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3193548387096774, - "acc_stderr": 0.026522709674667768, - "acc_norm": 0.3193548387096774, - "acc_norm_stderr": 0.026522709674667768 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.23504273504273504, - "acc_stderr": 0.02777883590493544, - "acc_norm": 0.23504273504273504, - "acc_norm_stderr": 0.02777883590493544 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.21509433962264152, - "acc_stderr": 0.02528839450289137, - "acc_norm": 0.21509433962264152, - "acc_norm_stderr": 0.02528839450289137 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.040139645540727735, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.040139645540727735 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.19205298013245034, - "acc_stderr": 0.03216298420593612, - "acc_norm": 0.19205298013245034, - "acc_norm_stderr": 0.03216298420593612 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.1791907514450867, - "acc_stderr": 0.02924251305906329, - "acc_norm": 0.1791907514450867, - "acc_norm_stderr": 0.02924251305906329 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.25132275132275134, - "acc_stderr": 0.022340482339643898, - "acc_norm": 0.25132275132275134, - "acc_norm_stderr": 0.022340482339643898 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165044, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165044 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24566473988439305, - "acc_stderr": 0.02317629820399201, - "acc_norm": 0.24566473988439305, - "acc_norm_stderr": 0.02317629820399201 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3006134969325153, - "acc_stderr": 0.03602511318806771, - "acc_norm": 0.3006134969325153, - "acc_norm_stderr": 0.03602511318806771 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25308641975308643, - "acc_stderr": 0.024191808600713002, - "acc_norm": 0.25308641975308643, - "acc_norm_stderr": 0.024191808600713002 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.23, - "acc_stderr": 0.042295258468165065, - "acc_norm": 0.23, - "acc_norm_stderr": 0.042295258468165065 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3471502590673575, - "acc_stderr": 0.03435696168361355, - "acc_norm": 0.3471502590673575, - "acc_norm_stderr": 0.03435696168361355 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2807017543859649, - "acc_stderr": 
0.04227054451232199, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.04227054451232199 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.22568807339449543, - "acc_stderr": 0.01792308766780305, - "acc_norm": 0.22568807339449543, - "acc_norm_stderr": 0.01792308766780305 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04006168083848877, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04006168083848877 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.02463004897982476, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.02463004897982476 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.35537190082644626, - "acc_stderr": 0.04369236326573981, - "acc_norm": 0.35537190082644626, - "acc_norm_stderr": 0.04369236326573981 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17105263157894737, - "acc_stderr": 0.030643607071677105, - "acc_norm": 0.17105263157894737, - "acc_norm_stderr": 0.030643607071677105 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2434640522875817, - "acc_stderr": 0.017362473762146623, - "acc_norm": 0.2434640522875817, - "acc_norm_stderr": 0.017362473762146623 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2553191489361702, - "acc_stderr": 0.02601199293090201, - "acc_norm": 0.2553191489361702, - "acc_norm_stderr": 0.02601199293090201 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.17857142857142858, - "acc_stderr": 0.036352091215778065, - "acc_norm": 0.17857142857142858, - "acc_norm_stderr": 0.036352091215778065 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.46296296296296297, - "acc_stderr": 0.03400603625538272, - "acc_norm": 0.46296296296296297, - "acc_norm_stderr": 0.03400603625538272 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27039106145251396, - "acc_stderr": 0.01485499393801008, - "acc_norm": 0.27039106145251396, - "acc_norm_stderr": 0.01485499393801008 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.27, - "acc_stderr": 0.0446196043338474, - "acc_norm": 0.27, - "acc_norm_stderr": 0.0446196043338474 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.17551020408163265, - "acc_stderr": 0.02435280072297001, - "acc_norm": 0.17551020408163265, - "acc_norm_stderr": 0.02435280072297001 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2911392405063291, - "acc_stderr": 0.02957160106575337, - "acc_norm": 0.2911392405063291, - "acc_norm_stderr": 0.02957160106575337 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2405475880052151, - "acc_stderr": 0.010916406735478947, - "acc_norm": 0.2405475880052151, - "acc_norm_stderr": 0.010916406735478947 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.030587591351604246, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.030587591351604246 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 
0.24242424242424243, - "acc_stderr": 0.033464098810559534, - "acc_norm": 0.24242424242424243, - "acc_norm_stderr": 0.033464098810559534 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.25091799265605874, - "mc1_stderr": 0.01517698502770768, - "mc2": 0.4268789482469243, - "mc2_stderr": 0.015138938072410749 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3779342723004695, - "acc_stderr": 0.016621166340849283, - "acc_norm": 0.5211267605633803, - "acc_norm_stderr": 0.017124472080967054 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "skt/ko-gpt-trinity-1.2B-v0.5", - "model_sha": "33f84c0da333d34533f0cfbe8f5972022d681e96", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git 
a/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json b/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json deleted file mode 100644 index 34a14fbb4dc2338efcca4036eb8435439ba84fcb..0000000000000000000000000000000000000000 --- a/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.19197952218430034, - "acc_stderr": 0.011509598906598086, - "acc_norm": 0.23976109215017063, - "acc_norm_stderr": 0.012476304127453947 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2806213901613224, - "acc_stderr": 0.004483845735187827, - "acc_norm": 0.3103963353913563, - "acc_norm_stderr": 0.0046171032803720095 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.25146198830409355, - "acc_stderr": 0.033275044238468436, - "acc_norm": 0.25146198830409355, - "acc_norm_stderr": 0.033275044238468436 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.17475728155339806, - "acc_stderr": 0.037601780060266196, - "acc_norm": 0.17475728155339806, - "acc_norm_stderr": 0.037601780060266196 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.28607918263090676, - "acc_stderr": 0.016160871405127526, - "acc_norm": 0.28607918263090676, - "acc_norm_stderr": 0.016160871405127526 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.03785714465066654, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.03785714465066654 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.24680851063829787, - "acc_stderr": 0.0281854413012341, - "acc_norm": 0.24680851063829787, - "acc_norm_stderr": 0.0281854413012341 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.28313253012048195, - "acc_stderr": 0.03507295431370518, - "acc_norm": 0.28313253012048195, - "acc_norm_stderr": 0.03507295431370518 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2797427652733119, - "acc_stderr": 0.025494259350694905, - "acc_norm": 0.2797427652733119, - "acc_norm_stderr": 0.025494259350694905 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3811659192825112, - "acc_stderr": 0.032596251184168264, - "acc_norm": 0.3811659192825112, - "acc_norm_stderr": 0.032596251184168264 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.24427480916030533, - "acc_stderr": 0.037683359597287434, - "acc_norm": 0.24427480916030533, - "acc_norm_stderr": 0.037683359597287434 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.20202020202020202, - "acc_stderr": 0.02860620428922987, - "acc_norm": 0.20202020202020202, - "acc_norm_stderr": 0.02860620428922987 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.27586206896551724, - "acc_stderr": 0.037245636197746325, - "acc_norm": 0.27586206896551724, - "acc_norm_stderr": 0.037245636197746325 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.04092563958237654, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.04092563958237654 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3403361344537815, - "acc_stderr": 0.030778057422931666, - "acc_norm": 0.3403361344537815, - "acc_norm_stderr": 0.030778057422931666 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3564102564102564, 
- "acc_stderr": 0.024283140529467295, - "acc_norm": 0.3564102564102564, - "acc_norm_stderr": 0.024283140529467295 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.043733130409147614, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.043733130409147614 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.29064039408866993, - "acc_stderr": 0.0319474007226554, - "acc_norm": 0.29064039408866993, - "acc_norm_stderr": 0.0319474007226554 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.23870967741935484, - "acc_stderr": 0.024251071262208834, - "acc_norm": 0.23870967741935484, - "acc_norm_stderr": 0.024251071262208834 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2, - "acc_stderr": 0.02461829819586651, - "acc_norm": 0.2, - "acc_norm_stderr": 0.02461829819586651 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721377, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721377 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.02684205787383371, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.02684205787383371 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.23841059602649006, - "acc_stderr": 0.0347918557259966, - "acc_norm": 0.23841059602649006, - "acc_norm_stderr": 0.0347918557259966 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23383084577114427, - "acc_stderr": 0.02992941540834839, - "acc_norm": 0.23383084577114427, - "acc_norm_stderr": 0.02992941540834839 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.28901734104046245, - "acc_stderr": 0.03456425745086999, - "acc_norm": 0.28901734104046245, - "acc_norm_stderr": 0.03456425745086999 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.22916666666666666, - "acc_stderr": 0.03514697467862388, - "acc_norm": 0.22916666666666666, - "acc_norm_stderr": 0.03514697467862388 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.21, - "acc_stderr": 0.04093601807403326, - "acc_norm": 0.21, - "acc_norm_stderr": 0.04093601807403326 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.21604938271604937, - "acc_stderr": 0.022899162918445796, - "acc_norm": 0.21604938271604937, - 
"acc_norm_stderr": 0.022899162918445796 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.26424870466321243, - "acc_stderr": 0.03182155050916647, - "acc_norm": 0.26424870466321243, - "acc_norm_stderr": 0.03182155050916647 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.04049339297748141, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.04049339297748141 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.21100917431192662, - "acc_stderr": 0.017493922404112648, - "acc_norm": 0.21100917431192662, - "acc_norm_stderr": 0.017493922404112648 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1746031746031746, - "acc_stderr": 0.033954900208561116, - "acc_norm": 0.1746031746031746, - "acc_norm_stderr": 0.033954900208561116 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.023805186524888156, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.023805186524888156 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2396694214876033, - "acc_stderr": 0.03896878985070417, - "acc_norm": 0.2396694214876033, - "acc_norm_stderr": 0.03896878985070417 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.18421052631578946, - "acc_stderr": 0.0315469804508223, - "acc_norm": 0.18421052631578946, - "acc_norm_stderr": 0.0315469804508223 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.2173202614379085, - "acc_stderr": 0.016684820929148594, - "acc_norm": 0.2173202614379085, - "acc_norm_stderr": 0.016684820929148594 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.23404255319148937, - "acc_stderr": 0.025257861359432414, - "acc_norm": 0.23404255319148937, - "acc_norm_stderr": 0.025257861359432414 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.039523019677025116, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.039523019677025116 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4722222222222222, - "acc_stderr": 0.0340470532865388, - "acc_norm": 0.4722222222222222, - "acc_norm_stderr": 0.0340470532865388 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4485294117647059, - "acc_stderr": 0.030211479609121593, - "acc_norm": 0.4485294117647059, - "acc_norm_stderr": 0.030211479609121593 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39183673469387753, - "acc_stderr": 0.03125127591089165, - "acc_norm": 0.39183673469387753, - "acc_norm_stderr": 0.03125127591089165 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2616033755274262, - "acc_stderr": 0.028609516716994934, - "acc_norm": 0.2616033755274262, - 
"acc_norm_stderr": 0.028609516716994934 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24641460234680573, - "acc_stderr": 0.01100597139992723, - "acc_norm": 0.24641460234680573, - "acc_norm_stderr": 0.01100597139992723 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.03166009679399812, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.03166009679399812 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2533659730722154, - "mc1_stderr": 0.01522589934082682, - "mc2": 0.45650352414713125, - "mc2_stderr": 0.015641592781139333 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3438967136150235, - "acc_stderr": 0.016283034359936485, - "acc_norm": 0.545774647887324, - "acc_norm_stderr": 0.017067801531871794 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - 
"harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "skt/kogpt2-base-v2", - "model_sha": "d0c0df48bf2b2c9350dd855021a5b216f560c0c7", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json b/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json deleted file mode 100644 index 826f98aec63b5cabecb8f3caa739c4b1d6ac9b09..0000000000000000000000000000000000000000 --- a/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27559726962457337, - "acc_stderr": 0.01305716965576184, - "acc_norm": 0.3225255972696246, - "acc_norm_stderr": 0.013659980894277366 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3858793069109739, - "acc_stderr": 0.0048580740134439885, - "acc_norm": 0.4965146385182235, - "acc_norm_stderr": 0.0049896601807921685 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2982456140350877, - "acc_stderr": 0.03508771929824565, - "acc_norm": 0.2982456140350877, - "acc_norm_stderr": 0.03508771929824565 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822582, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822582 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.25287356321839083, - "acc_stderr": 0.01554337731371968, - "acc_norm": 0.25287356321839083, - "acc_norm_stderr": 0.01554337731371968 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.1925925925925926, - "acc_stderr": 0.034065420585026526, - "acc_norm": 0.1925925925925926, - "acc_norm_stderr": 0.034065420585026526 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.28085106382978725, - "acc_stderr": 0.02937917046412482, - "acc_norm": 0.28085106382978725, - "acc_norm_stderr": 0.02937917046412482 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2469879518072289, - "acc_stderr": 0.03357351982064537, - "acc_norm": 0.2469879518072289, - "acc_norm_stderr": 0.03357351982064537 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.29260450160771706, - "acc_stderr": 0.02583989833487798, - "acc_norm": 0.29260450160771706, - "acc_norm_stderr": 0.02583989833487798 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19282511210762332, - "acc_stderr": 0.02647824096048936, - "acc_norm": 0.19282511210762332, - "acc_norm_stderr": 0.02647824096048936 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2748091603053435, - "acc_stderr": 0.039153454088478354, - "acc_norm": 0.2748091603053435, - "acc_norm_stderr": 0.039153454088478354 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.0307463007421245, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.0307463007421245 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.23448275862068965, - "acc_stderr": 0.035306258743465914, - "acc_norm": 0.23448275862068965, - "acc_norm_stderr": 
0.035306258743465914 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.29411764705882354, - "acc_stderr": 0.04533838195929775, - "acc_norm": 0.29411764705882354, - "acc_norm_stderr": 0.04533838195929775 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.2773109243697479, - "acc_stderr": 0.029079374539480007, - "acc_norm": 0.2773109243697479, - "acc_norm_stderr": 0.029079374539480007 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2923076923076923, - "acc_stderr": 0.023060438380857744, - "acc_norm": 0.2923076923076923, - "acc_norm_stderr": 0.023060438380857744 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.041331194402438376, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.041331194402438376 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733555, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733555 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2064516129032258, - "acc_stderr": 0.02302589961718871, - "acc_norm": 0.2064516129032258, - "acc_norm_stderr": 0.02302589961718871 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.27350427350427353, - "acc_stderr": 0.029202540153431177, - "acc_norm": 0.27350427350427353, - "acc_norm_stderr": 0.029202540153431177 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.26037735849056604, - "acc_stderr": 0.027008766090708097, - "acc_norm": 0.26037735849056604, - "acc_norm_stderr": 0.027008766090708097 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721375, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721375 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.02549753263960954, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.02549753263960954 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.22885572139303484, - "acc_stderr": 0.029705284056772432, - "acc_norm": 0.22885572139303484, - "acc_norm_stderr": 0.029705284056772432 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.03295304696818318, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.03295304696818318 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2566137566137566, - "acc_stderr": 0.022494510767503154, - "acc_norm": 0.2566137566137566, - "acc_norm_stderr": 0.022494510767503154 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2708333333333333, - "acc_stderr": 0.03716177437566018, - "acc_norm": 0.2708333333333333, - "acc_norm_stderr": 0.03716177437566018 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - 
"harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2514450867052023, - "acc_stderr": 0.02335736578587404, - "acc_norm": 0.2514450867052023, - "acc_norm_stderr": 0.02335736578587404 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.3128834355828221, - "acc_stderr": 0.03642914578292404, - "acc_norm": 0.3128834355828221, - "acc_norm_stderr": 0.03642914578292404 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25617283950617287, - "acc_stderr": 0.024288533637726095, - "acc_norm": 0.25617283950617287, - "acc_norm_stderr": 0.024288533637726095 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.30569948186528495, - "acc_stderr": 0.03324837939758159, - "acc_norm": 0.30569948186528495, - "acc_norm_stderr": 0.03324837939758159 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2631578947368421, - "acc_stderr": 0.041424397194893624, - "acc_norm": 0.2631578947368421, - "acc_norm_stderr": 0.041424397194893624 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.26238532110091745, - "acc_stderr": 0.018861885021534734, - "acc_norm": 0.26238532110091745, - "acc_norm_stderr": 0.018861885021534734 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.19047619047619047, - "acc_stderr": 0.03512207412302053, - "acc_norm": 0.19047619047619047, - "acc_norm_stderr": 0.03512207412302053 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.024630048979824775, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.024630048979824775 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036844, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036844 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.19008264462809918, - "acc_stderr": 0.03581796951709282, - "acc_norm": 0.19008264462809918, - "acc_norm_stderr": 0.03581796951709282 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.17763157894736842, - "acc_stderr": 0.03110318238312338, - "acc_norm": 0.17763157894736842, - "acc_norm_stderr": 0.03110318238312338 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.25163398692810457, - "acc_stderr": 0.01755581809132227, - "acc_norm": 0.25163398692810457, - "acc_norm_stderr": 0.01755581809132227 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729906, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729906 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4398148148148148, - "acc_stderr": 0.03385177976044811, - "acc_norm": 0.4398148148148148, - "acc_norm_stderr": 0.03385177976044811 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.23575418994413408, - "acc_stderr": 0.014196375686290804, - "acc_norm": 0.23575418994413408, - "acc_norm_stderr": 0.014196375686290804 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_professional_medicine|5": { - 
"acc": 0.19852941176470587, - "acc_stderr": 0.02423101337054109, - "acc_norm": 0.19852941176470587, - "acc_norm_stderr": 0.02423101337054109 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.31020408163265306, - "acc_stderr": 0.029613459872484378, - "acc_norm": 0.31020408163265306, - "acc_norm_stderr": 0.029613459872484378 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25316455696202533, - "acc_stderr": 0.0283046579430353, - "acc_norm": 0.25316455696202533, - "acc_norm_stderr": 0.0283046579430353 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2588005215123859, - "acc_stderr": 0.011186109046564608, - "acc_norm": 0.2588005215123859, - "acc_norm_stderr": 0.011186109046564608 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.03019028245350195, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.03019028245350195 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.03477691162163659, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.03477691162163659 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2521419828641371, - "mc1_stderr": 0.01520152224629995, - "mc2": 0.39714724864543566, - "mc2_stderr": 0.014754643585296967 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.6068075117370892, - "acc_stderr": 0.01674415749294926, - "acc_norm": 0.6795774647887324, - "acc_norm_stderr": 0.01599617808862692 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - 
"harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "sminpark/ds-alpha-model-v0.1-merged", - "model_sha": "877c87e7e62fa297f23e49e4aed3a2c0398a920a", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json b/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json deleted file mode 100644 index dfaaf343f15ab4b6348ff5ba1288ee14ed32f141..0000000000000000000000000000000000000000 --- a/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2713310580204778, - "acc_stderr": 0.012993807727545789, - "acc_norm": 0.310580204778157, - "acc_norm_stderr": 0.013522292098053057 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36456881099382593, - "acc_stderr": 0.004803253812881045, - "acc_norm": 0.46564429396534557, - "acc_norm_stderr": 0.004977988452502642 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.22807017543859648, - "acc_stderr": 0.03218093795602357, - "acc_norm": 0.22807017543859648, - "acc_norm_stderr": 0.03218093795602357 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.2912621359223301, - "acc_stderr": 0.04498676320572924, - "acc_norm": 0.2912621359223301, - "acc_norm_stderr": 0.04498676320572924 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.23499361430395913, - "acc_stderr": 0.015162024152278445, - "acc_norm": 0.23499361430395913, - "acc_norm_stderr": 0.015162024152278445 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.2740740740740741, - "acc_stderr": 0.03853254836552003, - "acc_norm": 0.2740740740740741, - "acc_norm_stderr": 0.03853254836552003 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.1829787234042553, - "acc_stderr": 0.02527604100044997, - "acc_norm": 0.1829787234042553, - "acc_norm_stderr": 0.02527604100044997 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.30120481927710846, - "acc_stderr": 0.0357160923005348, - "acc_norm": 0.30120481927710846, - "acc_norm_stderr": 0.0357160923005348 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.27009646302250806, - "acc_stderr": 0.0252180403734106, - "acc_norm": 0.27009646302250806, - "acc_norm_stderr": 0.0252180403734106 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.27802690582959644, - "acc_stderr": 0.030069584874494033, - "acc_norm": 0.27802690582959644, - "acc_norm_stderr": 0.030069584874494033 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2366412213740458, - "acc_stderr": 0.03727673575596918, - "acc_norm": 
0.2366412213740458, - "acc_norm_stderr": 0.03727673575596918 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.033184773338453315, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.033184773338453315 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2827586206896552, - "acc_stderr": 0.03752833958003336, - "acc_norm": 0.2827586206896552, - "acc_norm_stderr": 0.03752833958003336 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.27450980392156865, - "acc_stderr": 0.044405219061793254, - "acc_norm": 0.27450980392156865, - "acc_norm_stderr": 0.044405219061793254 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3445378151260504, - "acc_stderr": 0.030868682604121622, - "acc_norm": 0.3445378151260504, - "acc_norm_stderr": 0.030868682604121622 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.358974358974359, - "acc_stderr": 0.024321738484602364, - "acc_norm": 0.358974358974359, - "acc_norm_stderr": 0.024321738484602364 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.04133119440243838, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.04133119440243838 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2413793103448276, - "acc_stderr": 0.03010833071801162, - "acc_norm": 0.2413793103448276, - "acc_norm_stderr": 0.03010833071801162 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3096774193548387, - "acc_stderr": 0.026302774983517414, - "acc_norm": 0.3096774193548387, - "acc_norm_stderr": 0.026302774983517414 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.02704685763071668, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.02704685763071668 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241238, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.04461272175910507, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.04461272175910507 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409217, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409217 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2658959537572254, - "acc_stderr": 0.03368762932259431, - "acc_norm": 0.2658959537572254, - "acc_norm_stderr": 0.03368762932259431 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948365, - "acc_norm": 
0.24603174603174602, - "acc_norm_stderr": 0.022182037202948365 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.022894082489925992, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.022894082489925992 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2331288343558282, - "acc_stderr": 0.0332201579577674, - "acc_norm": 0.2331288343558282, - "acc_norm_stderr": 0.0332201579577674 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2654320987654321, - "acc_stderr": 0.024569223600460845, - "acc_norm": 0.2654320987654321, - "acc_norm_stderr": 0.024569223600460845 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.3160621761658031, - "acc_stderr": 0.033553973696861736, - "acc_norm": 0.3160621761658031, - "acc_norm_stderr": 0.033553973696861736 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.0404933929774814, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.0404933929774814 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3247706422018349, - "acc_stderr": 0.02007772910931033, - "acc_norm": 0.3247706422018349, - "acc_norm_stderr": 0.02007772910931033 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30952380952380953, - "acc_stderr": 0.04134913018303316, - "acc_norm": 0.30952380952380953, - "acc_norm_stderr": 0.04134913018303316 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.025738854797818737, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.025738854797818737 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909283, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909283 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.24793388429752067, - "acc_stderr": 0.03941897526516302, - "acc_norm": 0.24793388429752067, - "acc_norm_stderr": 0.03941897526516302 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351586, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351586 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.22712418300653595, - "acc_stderr": 0.016949853279212376, - "acc_norm": 0.22712418300653595, - "acc_norm_stderr": 0.016949853279212376 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2375886524822695, - "acc_stderr": 0.025389512552729906, - "acc_norm": 0.2375886524822695, - "acc_norm_stderr": 0.025389512552729906 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25892857142857145, - "acc_stderr": 0.04157751539865629, - "acc_norm": 0.25892857142857145, - "acc_norm_stderr": 0.04157751539865629 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.38425925925925924, - "acc_stderr": 0.03317354514310742, - "acc_norm": 0.38425925925925924, - "acc_norm_stderr": 
0.03317354514310742 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27262569832402234, - "acc_stderr": 0.014893391735249608, - "acc_norm": 0.27262569832402234, - "acc_norm_stderr": 0.014893391735249608 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4522058823529412, - "acc_stderr": 0.030233758551596452, - "acc_norm": 0.4522058823529412, - "acc_norm_stderr": 0.030233758551596452 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3020408163265306, - "acc_stderr": 0.029393609319879808, - "acc_norm": 0.3020408163265306, - "acc_norm_stderr": 0.029393609319879808 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.25097783572359844, - "acc_stderr": 0.011073730299187224, - "acc_norm": 0.25097783572359844, - "acc_norm_stderr": 0.011073730299187224 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25, - "acc_stderr": 0.03039153369274154, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03039153369274154 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2787878787878788, - "acc_stderr": 0.03501438706296781, - "acc_norm": 0.2787878787878788, - "acc_norm_stderr": 0.03501438706296781 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.28886168910648713, - "mc1_stderr": 0.015866346401384308, - "mc2": 0.4382110452098873, - "mc2_stderr": 0.015112522165835224 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2828638497652582, - "acc_stderr": 0.01543919852423632, - "acc_norm": 0.36032863849765256, - "acc_norm_stderr": 0.016457469695705117 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - 
"harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", - "model_sha": "79acd9e76f6a5f1e814294761b11c31fc24b9e64", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json b/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json deleted file mode 100644 index 8df46027d04e2b8e363d157dc422080ffc7770c7..0000000000000000000000000000000000000000 --- a/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2841296928327645, - "acc_stderr": 0.013179442447653887, - "acc_norm": 0.32593856655290104, - "acc_norm_stderr": 0.013697432466693237 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3791077474606652, - "acc_stderr": 0.004841734453506664, - "acc_norm": 0.4759012148974308, - "acc_norm_stderr": 0.004983982396187361 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.19298245614035087, - "acc_stderr": 0.030267457554898465, - "acc_norm": 0.19298245614035087, - "acc_norm_stderr": 0.030267457554898465 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.27184466019417475, - "acc_stderr": 0.044052680241409216, - "acc_norm": 0.27184466019417475, - "acc_norm_stderr": 0.044052680241409216 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2503192848020434, - "acc_stderr": 0.015491088951494597, - "acc_norm": 0.2503192848020434, - "acc_norm_stderr": 0.015491088951494597 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.0391545063041425, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.0391545063041425 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.251063829787234, - "acc_stderr": 0.028346963777162452, - "acc_norm": 0.251063829787234, - "acc_norm_stderr": 0.028346963777162452 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25301204819277107, - "acc_stderr": 
0.033844291552331346, - "acc_norm": 0.25301204819277107, - "acc_norm_stderr": 0.033844291552331346 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26366559485530544, - "acc_stderr": 0.02502553850053234, - "acc_norm": 0.26366559485530544, - "acc_norm_stderr": 0.02502553850053234 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.28699551569506726, - "acc_stderr": 0.030360379710291936, - "acc_norm": 0.28699551569506726, - "acc_norm_stderr": 0.030360379710291936 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.2900763358778626, - "acc_stderr": 0.03980066246467765, - "acc_norm": 0.2900763358778626, - "acc_norm_stderr": 0.03980066246467765 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2676767676767677, - "acc_stderr": 0.03154449888270286, - "acc_norm": 0.2676767676767677, - "acc_norm_stderr": 0.03154449888270286 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2896551724137931, - "acc_stderr": 0.037800192304380135, - "acc_norm": 0.2896551724137931, - "acc_norm_stderr": 0.037800192304380135 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617746, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617746 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.027553614467863783, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.027553614467863783 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2564102564102564, - "acc_stderr": 0.02213908110397153, - "acc_norm": 0.2564102564102564, - "acc_norm_stderr": 0.02213908110397153 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.0440844002276808, - "acc_norm": 0.26, - "acc_norm_stderr": 0.0440844002276808 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653695, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653695 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.25, - "acc_stderr": 0.04186091791394607, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04186091791394607 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.23645320197044334, - "acc_stderr": 0.029896114291733545, - "acc_norm": 0.23645320197044334, - "acc_norm_stderr": 0.029896114291733545 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24193548387096775, - "acc_stderr": 0.024362599693031096, - "acc_norm": 0.24193548387096775, - "acc_norm_stderr": 0.024362599693031096 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.21794871794871795, - "acc_stderr": 0.027046857630716677, - "acc_norm": 0.21794871794871795, - "acc_norm_stderr": 0.027046857630716677 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24150943396226415, - "acc_stderr": 0.026341480371118352, - "acc_norm": 0.24150943396226415, - "acc_norm_stderr": 0.026341480371118352 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.04461272175910508, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.04461272175910508 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2851851851851852, - "acc_stderr": 0.027528599210340492, - "acc_norm": 0.2851851851851852, - "acc_norm_stderr": 0.027528599210340492 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.03297986648473836, - "acc_norm": 
0.2052980132450331, - "acc_norm_stderr": 0.03297986648473836 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.23880597014925373, - "acc_stderr": 0.030147775935409217, - "acc_norm": 0.23880597014925373, - "acc_norm_stderr": 0.030147775935409217 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.0326926380614177, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.0326926380614177 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2328042328042328, - "acc_stderr": 0.021765961672154534, - "acc_norm": 0.2328042328042328, - "acc_norm_stderr": 0.021765961672154534 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3194444444444444, - "acc_stderr": 0.03899073687357335, - "acc_norm": 0.3194444444444444, - "acc_norm_stderr": 0.03899073687357335 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24855491329479767, - "acc_stderr": 0.023267528432100174, - "acc_norm": 0.24855491329479767, - "acc_norm_stderr": 0.023267528432100174 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.22699386503067484, - "acc_stderr": 0.03291099578615769, - "acc_norm": 0.22699386503067484, - "acc_norm_stderr": 0.03291099578615769 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25617283950617287, - "acc_stderr": 0.0242885336377261, - "acc_norm": 0.25617283950617287, - "acc_norm_stderr": 0.0242885336377261 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2538860103626943, - "acc_stderr": 0.03141024780565319, - "acc_norm": 0.2538860103626943, - "acc_norm_stderr": 0.03141024780565319 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.18421052631578946, - "acc_stderr": 0.03646758875075566, - "acc_norm": 0.18421052631578946, - "acc_norm_stderr": 0.03646758875075566 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30091743119266057, - "acc_stderr": 0.019664751366802114, - "acc_norm": 0.30091743119266057, - "acc_norm_stderr": 0.019664751366802114 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04006168083848878, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04006168083848878 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.22875816993464052, - "acc_stderr": 0.02405102973991225, - "acc_norm": 0.22875816993464052, - "acc_norm_stderr": 0.02405102973991225 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.24, - "acc_stderr": 0.04292346959909282, - "acc_norm": 0.24, - "acc_norm_stderr": 0.04292346959909282 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.2231404958677686, - "acc_stderr": 0.03800754475228733, - "acc_norm": 0.2231404958677686, - "acc_norm_stderr": 0.03800754475228733 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.2565789473684211, - "acc_stderr": 0.0355418036802569, - "acc_norm": 0.2565789473684211, - "acc_norm_stderr": 0.0355418036802569 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24019607843137256, - "acc_stderr": 0.017282760695167418, - "acc_norm": 0.24019607843137256, - "acc_norm_stderr": 0.017282760695167418 - }, 
- "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2127659574468085, - "acc_stderr": 0.024414612974307703, - "acc_norm": 0.2127659574468085, - "acc_norm_stderr": 0.024414612974307703 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.03022522616001237, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.03022522616001237 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095278, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095278 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3602941176470588, - "acc_stderr": 0.029163128570670736, - "acc_norm": 0.3602941176470588, - "acc_norm_stderr": 0.029163128570670736 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.2571428571428571, - "acc_stderr": 0.027979823538744546, - "acc_norm": 0.2571428571428571, - "acc_norm_stderr": 0.027979823538744546 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.270042194092827, - "acc_stderr": 0.028900721906293426, - "acc_norm": 0.270042194092827, - "acc_norm_stderr": 0.028900721906293426 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2470664928292047, - "acc_stderr": 0.011015752255279338, - "acc_norm": 0.2470664928292047, - "acc_norm_stderr": 0.011015752255279338 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.03019028245350194, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.03019028245350194 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03225078108306289, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03225078108306289 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2766217870257038, - "mc1_stderr": 0.015659605755326905, - "mc2": 0.43178124206391555, - "mc2_stderr": 0.01588615796057271 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.20187793427230047, - "acc_stderr": 0.013759869182275584, - "acc_norm": 0.2323943661971831, - "acc_norm_stderr": 0.014478284105610294 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - 
"harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", - "model_sha": "acc7ed3105114ba922fe4b408807b57e39ec0cff", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 07:55:25.json b/sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 07:55:25.json deleted file mode 100644 index c31a9cd4cb7ad5c11d0f82e1d4001f1563a895c2..0000000000000000000000000000000000000000 --- a/sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 07:55:25.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2713310580204778, - "acc_stderr": 0.012993807727545787, - "acc_norm": 0.31399317406143346, - "acc_norm_stderr": 0.013562691224726293 - }, - "harness|ko_hellaswag|10": { - "acc": 0.36656044612626965, - "acc_stderr": 0.004808802114592829, - "acc_norm": 0.46564429396534557, - "acc_norm_stderr": 0.0049779884525026396 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.031885780176863984, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.031885780176863984 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.24271844660194175, - "acc_stderr": 0.04245022486384493, - "acc_norm": 0.24271844660194175, - "acc_norm_stderr": 0.04245022486384493 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24648786717752236, - "acc_stderr": 0.015411308769686938, - "acc_norm": 0.24648786717752236, - "acc_norm_stderr": 0.015411308769686938 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 
0.25925925925925924, - "acc_stderr": 0.03785714465066652, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.03785714465066652 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816508, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816508 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.33191489361702126, - "acc_stderr": 0.03078373675774566, - "acc_norm": 0.33191489361702126, - "acc_norm_stderr": 0.03078373675774566 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511115, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 0.03529486801511115 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.26688102893890675, - "acc_stderr": 0.025122637608816622, - "acc_norm": 0.26688102893890675, - "acc_norm_stderr": 0.025122637608816622 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.33183856502242154, - "acc_stderr": 0.03160295143776679, - "acc_norm": 0.33183856502242154, - "acc_norm_stderr": 0.03160295143776679 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.26717557251908397, - "acc_stderr": 0.038808483010823944, - "acc_norm": 0.26717557251908397, - "acc_norm_stderr": 0.038808483010823944 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.29797979797979796, - "acc_stderr": 0.03258630383836556, - "acc_norm": 0.29797979797979796, - "acc_norm_stderr": 0.03258630383836556 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727772, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727772 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.18627450980392157, - "acc_stderr": 0.038739587141493524, - "acc_norm": 0.18627450980392157, - "acc_norm_stderr": 0.038739587141493524 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.23949579831932774, - "acc_stderr": 0.027722065493361283, - "acc_norm": 0.23949579831932774, - "acc_norm_stderr": 0.027722065493361283 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.2076923076923077, - "acc_stderr": 0.020567539567246804, - "acc_norm": 0.2076923076923077, - "acc_norm_stderr": 0.020567539567246804 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2962962962962963, - "acc_stderr": 0.04414343666854932, - "acc_norm": 0.2962962962962963, - "acc_norm_stderr": 0.04414343666854932 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.030712730070982592, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.030712730070982592 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.24193548387096775, - "acc_stderr": 0.024362599693031083, - "acc_norm": 0.24193548387096775, - "acc_norm_stderr": 0.024362599693031083 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2264957264957265, - "acc_stderr": 0.027421007295392923, - "acc_norm": 0.2264957264957265, - "acc_norm_stderr": 0.027421007295392923 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.25660377358490566, - "acc_stderr": 
0.026880647889052, - "acc_norm": 0.25660377358490566, - "acc_norm_stderr": 0.026880647889052 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.3, - "acc_stderr": 0.04389311454644286, - "acc_norm": 0.3, - "acc_norm_stderr": 0.04389311454644286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.25555555555555554, - "acc_stderr": 0.026593939101844054, - "acc_norm": 0.25555555555555554, - "acc_norm_stderr": 0.026593939101844054 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2052980132450331, - "acc_stderr": 0.03297986648473835, - "acc_norm": 0.2052980132450331, - "acc_norm_stderr": 0.03297986648473835 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.24378109452736318, - "acc_stderr": 0.03036049015401465, - "acc_norm": 0.24378109452736318, - "acc_norm_stderr": 0.03036049015401465 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.03126511206173041, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.03126511206173041 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24603174603174602, - "acc_stderr": 0.022182037202948368, - "acc_norm": 0.24603174603174602, - "acc_norm_stderr": 0.022182037202948368 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3263888888888889, - "acc_stderr": 0.03921067198982266, - "acc_norm": 0.3263888888888889, - "acc_norm_stderr": 0.03921067198982266 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252604, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252604 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2514450867052023, - "acc_stderr": 0.023357365785874037, - "acc_norm": 0.2514450867052023, - "acc_norm_stderr": 0.023357365785874037 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.24539877300613497, - "acc_stderr": 0.03380939813943354, - "acc_norm": 0.24539877300613497, - "acc_norm_stderr": 0.03380939813943354 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.02438366553103545, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.02438366553103545 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.23316062176165803, - "acc_stderr": 0.030516111371476005, - "acc_norm": 0.23316062176165803, - "acc_norm_stderr": 0.030516111371476005 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.038351539543994194, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.038351539543994194 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.25688073394495414, - "acc_stderr": 0.018732492928342472, - "acc_norm": 0.25688073394495414, - "acc_norm_stderr": 0.018732492928342472 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.04073524322147127, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.04073524322147127 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.02473998135511359, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.02473998135511359 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036624, - "acc_norm": 0.19, - "acc_norm_stderr": 
0.03942772444036624 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.23140495867768596, - "acc_stderr": 0.03849856098794088, - "acc_norm": 0.23140495867768596, - "acc_norm_stderr": 0.03849856098794088 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.24342105263157895, - "acc_stderr": 0.034923496688842384, - "acc_norm": 0.24342105263157895, - "acc_norm_stderr": 0.034923496688842384 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23039215686274508, - "acc_stderr": 0.01703522925803404, - "acc_norm": 0.23039215686274508, - "acc_norm_stderr": 0.01703522925803404 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22695035460992907, - "acc_stderr": 0.02498710636564297, - "acc_norm": 0.22695035460992907, - "acc_norm_stderr": 0.02498710636564297 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.32142857142857145, - "acc_stderr": 0.04432804055291519, - "acc_norm": 0.32142857142857145, - "acc_norm_stderr": 0.04432804055291519 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.030546745264953167, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.030546745264953167 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2681564245810056, - "acc_stderr": 0.014816119635316994, - "acc_norm": 0.2681564245810056, - "acc_norm_stderr": 0.014816119635316994 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.17, - "acc_stderr": 0.037752516806863715, - "acc_norm": 0.17, - "acc_norm_stderr": 0.037752516806863715 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3014705882352941, - "acc_stderr": 0.027875982114273168, - "acc_norm": 0.3014705882352941, - "acc_norm_stderr": 0.027875982114273168 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23673469387755103, - "acc_stderr": 0.027212835884073163, - "acc_norm": 0.23673469387755103, - "acc_norm_stderr": 0.027212835884073163 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.22362869198312235, - "acc_stderr": 0.027123298205229972, - "acc_norm": 0.22362869198312235, - "acc_norm_stderr": 0.027123298205229972 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24185136897001303, - "acc_stderr": 0.010936550813827054, - "acc_norm": 0.24185136897001303, - "acc_norm_stderr": 0.010936550813827054 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.031660096793998116, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.031660096793998116 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2692778457772338, - "mc1_stderr": 0.01552856663708731, - "mc2": 0.42575853795337826, - "mc2_stderr": 0.016210145327267837 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.1983568075117371, - "acc_stderr": 0.013669396132574575, - "acc_norm": 0.22535211267605634, - "acc_norm_stderr": 0.014322479434188889 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - 
"harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", - "model_sha": "ab9bbba26729005519ac0cc01b349be5e2ad95fe", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json b/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json deleted file mode 100644 index e5110b5492ae7a7cf76dba350676e112acc47eed..0000000000000000000000000000000000000000 --- a/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.33532423208191126, - "acc_stderr": 0.01379618294778556, - "acc_norm": 0.38139931740614336, - "acc_norm_stderr": 0.01419438908668526 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3703445528779128, - "acc_stderr": 0.004819100456867818, - "acc_norm": 0.481876120294762, - "acc_norm_stderr": 0.004986502296931182 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4619883040935672, - "acc_stderr": 
0.03823727092882307, - "acc_norm": 0.4619883040935672, - "acc_norm_stderr": 0.03823727092882307 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5631067961165048, - "acc_stderr": 0.049111471073657764, - "acc_norm": 0.5631067961165048, - "acc_norm_stderr": 0.049111471073657764 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.46871008939974457, - "acc_stderr": 0.017844918090468544, - "acc_norm": 0.46871008939974457, - "acc_norm_stderr": 0.017844918090468544 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.042561937679014075, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.042561937679014075 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.40425531914893614, - "acc_stderr": 0.03208115750788684, - "acc_norm": 0.40425531914893614, - "acc_norm_stderr": 0.03208115750788684 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.43373493975903615, - "acc_stderr": 0.03858158940685515, - "acc_norm": 0.43373493975903615, - "acc_norm_stderr": 0.03858158940685515 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.5112540192926045, - "acc_stderr": 0.028390897396863533, - "acc_norm": 0.5112540192926045, - "acc_norm_stderr": 0.028390897396863533 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4439461883408072, - "acc_stderr": 0.03334625674242728, - "acc_norm": 0.4439461883408072, - "acc_norm_stderr": 0.03334625674242728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.5038167938931297, - "acc_stderr": 0.043851623256015534, - "acc_norm": 0.5038167938931297, - "acc_norm_stderr": 0.043851623256015534 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5505050505050505, - "acc_stderr": 0.035441324919479704, - "acc_norm": 0.5505050505050505, - "acc_norm_stderr": 0.035441324919479704 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4482758620689655, - "acc_stderr": 0.041443118108781506, - "acc_norm": 0.4482758620689655, - "acc_norm_stderr": 0.041443118108781506 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364395, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364395 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.542016806722689, - "acc_stderr": 0.03236361111951941, - "acc_norm": 0.542016806722689, - "acc_norm_stderr": 0.03236361111951941 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4794871794871795, - "acc_stderr": 0.025329663163489943, - "acc_norm": 0.4794871794871795, - "acc_norm_stderr": 0.025329663163489943 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562429, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562429 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.34, - "acc_stderr": 0.047609522856952344, - "acc_norm": 0.34, - "acc_norm_stderr": 0.047609522856952344 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5277777777777778, - "acc_stderr": 0.04826217294139894, - "acc_norm": 0.5277777777777778, - "acc_norm_stderr": 0.04826217294139894 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.43842364532019706, - "acc_stderr": 0.03491207857486519, - "acc_norm": 0.43842364532019706, - "acc_norm_stderr": 
0.03491207857486519 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.4935483870967742, - "acc_stderr": 0.02844163823354051, - "acc_norm": 0.4935483870967742, - "acc_norm_stderr": 0.02844163823354051 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7435897435897436, - "acc_stderr": 0.028605953702004243, - "acc_norm": 0.7435897435897436, - "acc_norm_stderr": 0.028605953702004243 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4339622641509434, - "acc_stderr": 0.030503292013342592, - "acc_norm": 0.4339622641509434, - "acc_norm_stderr": 0.030503292013342592 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.509090909090909, - "acc_stderr": 0.0478833976870286, - "acc_norm": 0.509090909090909, - "acc_norm_stderr": 0.0478833976870286 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.337037037037037, - "acc_stderr": 0.028820884666253252, - "acc_norm": 0.337037037037037, - "acc_norm_stderr": 0.028820884666253252 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2582781456953642, - "acc_stderr": 0.035737053147634576, - "acc_norm": 0.2582781456953642, - "acc_norm_stderr": 0.035737053147634576 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5970149253731343, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.5970149253731343, - "acc_norm_stderr": 0.034683432951111266 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3815028901734104, - "acc_stderr": 0.03703851193099521, - "acc_norm": 0.3815028901734104, - "acc_norm_stderr": 0.03703851193099521 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.373015873015873, - "acc_stderr": 0.02490699045899257, - "acc_norm": 0.373015873015873, - "acc_norm_stderr": 0.02490699045899257 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3472222222222222, - "acc_stderr": 0.039812405437178615, - "acc_norm": 0.3472222222222222, - "acc_norm_stderr": 0.039812405437178615 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.58, - "acc_stderr": 0.04960449637488584, - "acc_norm": 0.58, - "acc_norm_stderr": 0.04960449637488584 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.523121387283237, - "acc_stderr": 0.026890297881303118, - "acc_norm": 0.523121387283237, - "acc_norm_stderr": 0.026890297881303118 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.5153374233128835, - "acc_stderr": 0.039265223787088424, - "acc_norm": 0.5153374233128835, - "acc_norm_stderr": 0.039265223787088424 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4382716049382716, - "acc_stderr": 0.027607914087400473, - "acc_norm": 0.4382716049382716, - "acc_norm_stderr": 0.027607914087400473 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5129533678756477, - "acc_stderr": 0.0360722806104775, - "acc_norm": 0.5129533678756477, - "acc_norm_stderr": 0.0360722806104775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.04434600701584925, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.04434600701584925 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4917431192660551, - "acc_stderr": 0.021434399918214338, - "acc_norm": 0.4917431192660551, - "acc_norm_stderr": 0.021434399918214338 - }, - 
"harness|ko_mmlu_formal_logic|5": { - "acc": 0.35714285714285715, - "acc_stderr": 0.04285714285714281, - "acc_norm": 0.35714285714285715, - "acc_norm_stderr": 0.04285714285714281 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.45751633986928103, - "acc_stderr": 0.028526383452142628, - "acc_norm": 0.45751633986928103, - "acc_norm_stderr": 0.028526383452142628 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.48, - "acc_stderr": 0.050211673156867795, - "acc_norm": 0.48, - "acc_norm_stderr": 0.050211673156867795 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6942148760330579, - "acc_stderr": 0.04205953933884124, - "acc_norm": 0.6942148760330579, - "acc_norm_stderr": 0.04205953933884124 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.4407894736842105, - "acc_stderr": 0.04040311062490435, - "acc_norm": 0.4407894736842105, - "acc_norm_stderr": 0.04040311062490435 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.39869281045751637, - "acc_stderr": 0.01980828131744984, - "acc_norm": 0.39869281045751637, - "acc_norm_stderr": 0.01980828131744984 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35815602836879434, - "acc_stderr": 0.028602085862759412, - "acc_norm": 0.35815602836879434, - "acc_norm_stderr": 0.028602085862759412 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4017857142857143, - "acc_stderr": 0.04653333146973646, - "acc_norm": 0.4017857142857143, - "acc_norm_stderr": 0.04653333146973646 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.03381200005643525, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.03381200005643525 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.34413407821229053, - "acc_stderr": 0.015889221313307094, - "acc_norm": 0.34413407821229053, - "acc_norm_stderr": 0.015889221313307094 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.58, - "acc_stderr": 0.049604496374885836, - "acc_norm": 0.58, - "acc_norm_stderr": 0.049604496374885836 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.41544117647058826, - "acc_stderr": 0.029935342707877743, - "acc_norm": 0.41544117647058826, - "acc_norm_stderr": 0.029935342707877743 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.563265306122449, - "acc_stderr": 0.03175195237583323, - "acc_norm": 0.563265306122449, - "acc_norm_stderr": 0.03175195237583323 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5738396624472574, - "acc_stderr": 0.03219035703131774, - "acc_norm": 0.5738396624472574, - "acc_norm_stderr": 0.03219035703131774 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.32790091264667537, - "acc_stderr": 0.011989936640666535, - "acc_norm": 0.32790091264667537, - "acc_norm_stderr": 0.011989936640666535 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.44607843137254904, - "acc_stderr": 0.03488845451304974, - "acc_norm": 0.44607843137254904, - "acc_norm_stderr": 0.03488845451304974 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.45454545454545453, - "acc_stderr": 0.03888176921674099, - "acc_norm": 0.45454545454545453, - "acc_norm_stderr": 0.03888176921674099 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2864137086903305, - "mc1_stderr": 0.015826142439502342, - "mc2": 0.4613168911756529, - "mc2_stderr": 0.015417066073991514 - }, - 
"harness|ko_commongen_v2|2": { - "acc": 0.24295774647887325, - "acc_stderr": 0.01470146638508064, - "acc_norm": 0.29694835680751175, - "acc_norm_stderr": 0.015662796197363146 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "taeminlee/mistral_7B_ma", - "model_sha": "9773826bd9bb297186b78c87a410cbb07e1919cc", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json b/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json deleted file mode 100644 index 2498a26d3a85528bb394e1cf1ab71b5c179cc9d2..0000000000000000000000000000000000000000 --- a/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json +++ 
/dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.27047781569965873, - "acc_stderr": 0.012980954547659556, - "acc_norm": 0.32849829351535836, - "acc_norm_stderr": 0.01372497846553737 - }, - "harness|ko_hellaswag|10": { - "acc": 0.386476797450707, - "acc_stderr": 0.004859467984155263, - "acc_norm": 0.4980083648675563, - "acc_norm_stderr": 0.004989741826250384 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.3157894736842105, - "acc_stderr": 0.035650796707083106, - "acc_norm": 0.3157894736842105, - "acc_norm_stderr": 0.035650796707083106 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1650485436893204, - "acc_stderr": 0.036756688322331886, - "acc_norm": 0.1650485436893204, - "acc_norm_stderr": 0.036756688322331886 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.27330779054916987, - "acc_stderr": 0.015936681062628556, - "acc_norm": 0.27330779054916987, - "acc_norm_stderr": 0.015936681062628556 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3111111111111111, - "acc_stderr": 0.03999262876617722, - "acc_norm": 0.3111111111111111, - "acc_norm_stderr": 0.03999262876617722 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2297872340425532, - "acc_stderr": 0.027501752944412424, - "acc_norm": 0.2297872340425532, - "acc_norm_stderr": 0.027501752944412424 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21686746987951808, - "acc_stderr": 0.032082844503563655, - "acc_norm": 0.21686746987951808, - "acc_norm_stderr": 0.032082844503563655 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3054662379421222, - "acc_stderr": 0.026160584450140474, - "acc_norm": 0.3054662379421222, - "acc_norm_stderr": 0.026160584450140474 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.19282511210762332, - "acc_stderr": 0.02647824096048936, - "acc_norm": 0.19282511210762332, - "acc_norm_stderr": 0.02647824096048936 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.20610687022900764, - "acc_stderr": 0.03547771004159465, - "acc_norm": 0.20610687022900764, - "acc_norm_stderr": 0.03547771004159465 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23737373737373738, - "acc_stderr": 0.03031371053819887, - "acc_norm": 0.23737373737373738, - "acc_norm_stderr": 0.03031371053819887 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727771, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727771 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.20588235294117646, - "acc_stderr": 0.04023382273617747, - "acc_norm": 0.20588235294117646, - "acc_norm_stderr": 0.04023382273617747 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.22268907563025211, - "acc_stderr": 0.02702543349888239, - "acc_norm": 0.22268907563025211, - "acc_norm_stderr": 0.02702543349888239 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.22564102564102564, - "acc_stderr": 0.02119363252514854, - "acc_norm": 0.22564102564102564, - "acc_norm_stderr": 0.02119363252514854 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768078, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768078 - }, - 
"harness|ko_mmlu_global_facts|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.04330043749650742, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.04330043749650742 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2315270935960591, - "acc_stderr": 0.029678333141444455, - "acc_norm": 0.2315270935960591, - "acc_norm_stderr": 0.029678333141444455 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.22580645161290322, - "acc_stderr": 0.023785577884181012, - "acc_norm": 0.22580645161290322, - "acc_norm_stderr": 0.023785577884181012 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.3076923076923077, - "acc_stderr": 0.030236389942173092, - "acc_norm": 0.3076923076923077, - "acc_norm_stderr": 0.030236389942173092 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.24150943396226415, - "acc_stderr": 0.02634148037111835, - "acc_norm": 0.24150943396226415, - "acc_norm_stderr": 0.02634148037111835 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.22727272727272727, - "acc_stderr": 0.04013964554072774, - "acc_norm": 0.22727272727272727, - "acc_norm_stderr": 0.04013964554072774 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2777777777777778, - "acc_stderr": 0.027309140588230193, - "acc_norm": 0.2777777777777778, - "acc_norm_stderr": 0.027309140588230193 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2119205298013245, - "acc_stderr": 0.03336767086567977, - "acc_norm": 0.2119205298013245, - "acc_norm_stderr": 0.03336767086567977 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.26865671641791045, - "acc_stderr": 0.03134328358208954, - "acc_norm": 0.26865671641791045, - "acc_norm_stderr": 0.03134328358208954 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.23699421965317918, - "acc_stderr": 0.03242414757483099, - "acc_norm": 0.23699421965317918, - "acc_norm_stderr": 0.03242414757483099 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.21164021164021163, - "acc_stderr": 0.021037331505262883, - "acc_norm": 0.21164021164021163, - "acc_norm_stderr": 0.021037331505262883 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.023083658586984204, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.023083658586984204 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.3117283950617284, - "acc_stderr": 0.02577311116963043, - "acc_norm": 0.3117283950617284, - "acc_norm_stderr": 0.02577311116963043 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.22, - "acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - 
"acc": 0.21761658031088082, - "acc_stderr": 0.029778663037752975, - "acc_norm": 0.21761658031088082, - "acc_norm_stderr": 0.029778663037752975 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813344, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813344 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3137614678899083, - "acc_stderr": 0.019894723341469134, - "acc_norm": 0.3137614678899083, - "acc_norm_stderr": 0.019894723341469134 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.03718489006818115, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.03718489006818115 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.24836601307189543, - "acc_stderr": 0.02473998135511359, - "acc_norm": 0.24836601307189543, - "acc_norm_stderr": 0.02473998135511359 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.36363636363636365, - "acc_stderr": 0.04391326286724071, - "acc_norm": 0.36363636363636365, - "acc_norm_stderr": 0.04391326286724071 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03459777606810536, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03459777606810536 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.24673202614379086, - "acc_stderr": 0.0174408203674025, - "acc_norm": 0.24673202614379086, - "acc_norm_stderr": 0.0174408203674025 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2198581560283688, - "acc_stderr": 0.024706141070705488, - "acc_norm": 0.2198581560283688, - "acc_norm_stderr": 0.024706141070705488 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.22321428571428573, - "acc_stderr": 0.03952301967702511, - "acc_norm": 0.22321428571428573, - "acc_norm_stderr": 0.03952301967702511 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.28703703703703703, - "acc_stderr": 0.030851992993257013, - "acc_norm": 0.28703703703703703, - "acc_norm_stderr": 0.030851992993257013 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24692737430167597, - "acc_stderr": 0.014422292204808852, - "acc_norm": 0.24692737430167597, - "acc_norm_stderr": 0.014422292204808852 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816505, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816505 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.26, - "acc_stderr": 0.044084400227680794, - "acc_norm": 0.26, - "acc_norm_stderr": 0.044084400227680794 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.27205882352941174, - "acc_stderr": 0.027033041151681453, - "acc_norm": 0.27205882352941174, - "acc_norm_stderr": 0.027033041151681453 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.23265306122448978, - "acc_stderr": 0.02704925791589618, - "acc_norm": 0.23265306122448978, - "acc_norm_stderr": 0.02704925791589618 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.29535864978902954, - "acc_stderr": 0.029696338713422882, - "acc_norm": 0.29535864978902954, - "acc_norm_stderr": 0.029696338713422882 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2653194263363755, - "acc_stderr": 0.011276198843958866, - "acc_norm": 0.2653194263363755, - "acc_norm_stderr": 0.011276198843958866 - }, - 
"harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693257, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693257 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2606060606060606, - "acc_stderr": 0.034277431758165236, - "acc_norm": 0.2606060606060606, - "acc_norm_stderr": 0.034277431758165236 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2582619339045288, - "mc1_stderr": 0.015321821688476189, - "mc2": 0.42136905448237544, - "mc2_stderr": 0.014761634772803508 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7300469483568075, - "acc_stderr": 0.015217900336776868, - "acc_norm": 0.789906103286385, - "acc_norm_stderr": 0.013964637699696676 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle", - "model_sha": 
"eed242f438b6ee3860a810454126f468373836b2", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json b/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json deleted file mode 100644 index 1c0b5b6fd4f463b1b32adfbececfddd4bd6d1e3d..0000000000000000000000000000000000000000 --- a/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2636518771331058, - "acc_stderr": 0.012875929151297073, - "acc_norm": 0.31399317406143346, - "acc_norm_stderr": 0.013562691224726284 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3792073292172874, - "acc_stderr": 0.0048419819735152775, - "acc_norm": 0.48775144393547104, - "acc_norm_stderr": 0.004988283981631052 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.21052631578947367, - "acc_stderr": 0.031267817146631786, - "acc_norm": 0.21052631578947367, - "acc_norm_stderr": 0.031267817146631786 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.18446601941747573, - "acc_stderr": 0.03840423627288276, - "acc_norm": 0.18446601941747573, - "acc_norm_stderr": 0.03840423627288276 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2554278416347382, - "acc_stderr": 0.015594955384455765, - "acc_norm": 0.2554278416347382, - "acc_norm_stderr": 0.015594955384455765 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.25925925925925924, - "acc_stderr": 0.037857144650666544, - "acc_norm": 0.25925925925925924, - "acc_norm_stderr": 0.037857144650666544 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.25957446808510637, - "acc_stderr": 0.028659179374292326, - "acc_norm": 0.25957446808510637, - "acc_norm_stderr": 0.028659179374292326 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2469879518072289, - "acc_stderr": 0.03357351982064536, - "acc_norm": 0.2469879518072289, - "acc_norm_stderr": 0.03357351982064536 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.31189710610932475, - "acc_stderr": 0.02631185807185416, - "acc_norm": 0.31189710610932475, - "acc_norm_stderr": 0.02631185807185416 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.29596412556053814, - "acc_stderr": 0.03063659134869981, - "acc_norm": 0.29596412556053814, - "acc_norm_stderr": 0.03063659134869981 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3053435114503817, - "acc_stderr": 0.040393149787245626, - "acc_norm": 0.3053435114503817, - "acc_norm_stderr": 0.040393149787245626 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3181818181818182, - "acc_stderr": 0.03318477333845331, - "acc_norm": 0.3181818181818182, - "acc_norm_stderr": 0.03318477333845331 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.036001056927277716, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.036001056927277716 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.04488482852329017, - "acc_norm": 0.28431372549019607, - 
"acc_norm_stderr": 0.04488482852329017 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.33613445378151263, - "acc_stderr": 0.03068473711513535, - "acc_norm": 0.33613445378151263, - "acc_norm_stderr": 0.03068473711513535 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3128205128205128, - "acc_stderr": 0.023507579020645347, - "acc_norm": 0.3128205128205128, - "acc_norm_stderr": 0.023507579020645347 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.27, - "acc_stderr": 0.04461960433384741, - "acc_norm": 0.27, - "acc_norm_stderr": 0.04461960433384741 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.04284467968052191, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.04284467968052191 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.28078817733990147, - "acc_stderr": 0.0316185633535861, - "acc_norm": 0.28078817733990147, - "acc_norm_stderr": 0.0316185633535861 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3064516129032258, - "acc_stderr": 0.026226485652553873, - "acc_norm": 0.3064516129032258, - "acc_norm_stderr": 0.026226485652553873 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19658119658119658, - "acc_stderr": 0.02603538609895129, - "acc_norm": 0.19658119658119658, - "acc_norm_stderr": 0.02603538609895129 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241238, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.23636363636363636, - "acc_stderr": 0.04069306319721377, - "acc_norm": 0.23636363636363636, - "acc_norm_stderr": 0.04069306319721377 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.026962424325073838, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.026962424325073838 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.32450331125827814, - "acc_stderr": 0.03822746937658754, - "acc_norm": 0.32450331125827814, - "acc_norm_stderr": 0.03822746937658754 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.263681592039801, - "acc_stderr": 0.031157150869355554, - "acc_norm": 0.263681592039801, - "acc_norm_stderr": 0.031157150869355554 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.2947976878612717, - "acc_stderr": 0.03476599607516478, - "acc_norm": 0.2947976878612717, - "acc_norm_stderr": 0.03476599607516478 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.23544973544973544, - "acc_stderr": 0.021851509822031715, - "acc_norm": 0.23544973544973544, - "acc_norm_stderr": 0.021851509822031715 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2916666666666667, - "acc_stderr": 0.038009680605548574, - "acc_norm": 0.2916666666666667, - "acc_norm_stderr": 0.038009680605548574 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.38, - "acc_stderr": 0.04878317312145632, - "acc_norm": 0.38, - "acc_norm_stderr": 0.04878317312145632 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2630057803468208, - "acc_stderr": 0.023703099525258176, - "acc_norm": 0.2630057803468208, - "acc_norm_stderr": 0.023703099525258176 
- }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25153374233128833, - "acc_stderr": 0.03408997886857529, - "acc_norm": 0.25153374233128833, - "acc_norm_stderr": 0.03408997886857529 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2623456790123457, - "acc_stderr": 0.024477222856135114, - "acc_norm": 0.2623456790123457, - "acc_norm_stderr": 0.024477222856135114 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.32642487046632124, - "acc_stderr": 0.033840286211432945, - "acc_norm": 0.32642487046632124, - "acc_norm_stderr": 0.033840286211432945 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.24561403508771928, - "acc_stderr": 0.040493392977481404, - "acc_norm": 0.24561403508771928, - "acc_norm_stderr": 0.040493392977481404 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3284403669724771, - "acc_stderr": 0.020135902797298395, - "acc_norm": 0.3284403669724771, - "acc_norm_stderr": 0.020135902797298395 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.1984126984126984, - "acc_stderr": 0.03567016675276863, - "acc_norm": 0.1984126984126984, - "acc_norm_stderr": 0.03567016675276863 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28431372549019607, - "acc_stderr": 0.02582916327275748, - "acc_norm": 0.28431372549019607, - "acc_norm_stderr": 0.02582916327275748 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.2, - "acc_stderr": 0.04020151261036844, - "acc_norm": 0.2, - "acc_norm_stderr": 0.04020151261036844 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.23140495867768596, - "acc_stderr": 0.03849856098794088, - "acc_norm": 0.23140495867768596, - "acc_norm_stderr": 0.03849856098794088 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.26973684210526316, - "acc_stderr": 0.03611780560284898, - "acc_norm": 0.26973684210526316, - "acc_norm_stderr": 0.03611780560284898 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.23366013071895425, - "acc_stderr": 0.017119158496044506, - "acc_norm": 0.23366013071895425, - "acc_norm_stderr": 0.017119158496044506 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.24468085106382978, - "acc_stderr": 0.02564555362226673, - "acc_norm": 0.24468085106382978, - "acc_norm_stderr": 0.02564555362226673 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.19642857142857142, - "acc_stderr": 0.03770970049347018, - "acc_norm": 0.19642857142857142, - "acc_norm_stderr": 0.03770970049347018 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.44907407407407407, - "acc_stderr": 0.03392238405321617, - "acc_norm": 0.44907407407407407, - "acc_norm_stderr": 0.03392238405321617 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.27150837988826815, - "acc_stderr": 0.014874252168095278, - "acc_norm": 0.27150837988826815, - "acc_norm_stderr": 0.014874252168095278 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.21, - "acc_stderr": 0.040936018074033256, - "acc_norm": 0.21, - "acc_norm_stderr": 0.040936018074033256 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4375, - "acc_stderr": 0.030134614954403924, - "acc_norm": 0.4375, - "acc_norm_stderr": 0.030134614954403924 - }, - 
"harness|ko_mmlu_security_studies|5": { - "acc": 0.37551020408163266, - "acc_stderr": 0.031001209039894836, - "acc_norm": 0.37551020408163266, - "acc_norm_stderr": 0.031001209039894836 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25738396624472576, - "acc_stderr": 0.028458820991460295, - "acc_norm": 0.25738396624472576, - "acc_norm_stderr": 0.028458820991460295 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24771838331160365, - "acc_stderr": 0.011025499291443737, - "acc_norm": 0.24771838331160365, - "acc_norm_stderr": 0.011025499291443737 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.029771775228145638, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.029771775228145638 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2727272727272727, - "acc_stderr": 0.0347769116216366, - "acc_norm": 0.2727272727272727, - "acc_norm_stderr": 0.0347769116216366 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2668298653610771, - "mc1_stderr": 0.01548369193923726, - "mc2": 0.4409725050511923, - "mc2_stderr": 0.014977060866131325 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.7429577464788732, - "acc_stderr": 0.014980266433015262, - "acc_norm": 0.8215962441314554, - "acc_norm_stderr": 0.013124006571347439 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 
1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle_ma", - "model_sha": "e59d2d5beba5386564f914402663e3d530a1b093", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json b/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json deleted file mode 100644 index ca4cd0ef0624eca1617b1c5ee48bf5a82fe8e365..0000000000000000000000000000000000000000 --- a/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.3310580204778157, - "acc_stderr": 0.013752062419817836, - "acc_norm": 0.37372013651877134, - "acc_norm_stderr": 0.014137708601759075 - }, - "harness|ko_hellaswag|10": { - "acc": 0.37711611232822145, - "acc_stderr": 0.004836738514051328, - "acc_norm": 0.48078072097191793, - "acc_norm_stderr": 0.004986093791041655 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4678362573099415, - "acc_stderr": 0.03826882417660369, - "acc_norm": 0.4678362573099415, - "acc_norm_stderr": 0.03826882417660369 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5533980582524272, - "acc_stderr": 0.04922424153458934, - "acc_norm": 0.5533980582524272, - "acc_norm_stderr": 0.04922424153458934 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.48020434227330777, - "acc_stderr": 0.017865944827291605, - "acc_norm": 0.48020434227330777, - "acc_norm_stderr": 0.017865944827291605 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4074074074074074, - "acc_stderr": 0.042446332383532286, - "acc_norm": 0.4074074074074074, - "acc_norm_stderr": 0.042446332383532286 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3617021276595745, - "acc_stderr": 0.0314108219759624, - "acc_norm": 0.3617021276595745, - "acc_norm_stderr": 0.0314108219759624 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.42771084337349397, - "acc_stderr": 0.03851597683718533, - "acc_norm": 0.42771084337349397, - "acc_norm_stderr": 0.03851597683718533 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.45980707395498394, - "acc_stderr": 0.028306190403305696, - "acc_norm": 0.45980707395498394, - "acc_norm_stderr": 0.028306190403305696 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.48878923766816146, - "acc_stderr": 0.033549366530984746, - "acc_norm": 0.48878923766816146, - "acc_norm_stderr": 0.033549366530984746 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.46564885496183206, - "acc_stderr": 0.04374928560599738, - "acc_norm": 0.46564885496183206, - "acc_norm_stderr": 0.04374928560599738 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.0479372485441102, - "acc_norm": 0.35, - "acc_norm_stderr": 0.0479372485441102 - }, - 
"harness|ko_mmlu_high_school_geography|5": { - "acc": 0.5202020202020202, - "acc_stderr": 0.03559443565563918, - "acc_norm": 0.5202020202020202, - "acc_norm_stderr": 0.03559443565563918 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4896551724137931, - "acc_stderr": 0.04165774775728763, - "acc_norm": 0.4896551724137931, - "acc_norm_stderr": 0.04165774775728763 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.3137254901960784, - "acc_stderr": 0.04617034827006716, - "acc_norm": 0.3137254901960784, - "acc_norm_stderr": 0.04617034827006716 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.4579831932773109, - "acc_stderr": 0.03236361111951941, - "acc_norm": 0.4579831932773109, - "acc_norm_stderr": 0.03236361111951941 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.4461538461538462, - "acc_stderr": 0.025203571773028333, - "acc_norm": 0.4461538461538462, - "acc_norm_stderr": 0.025203571773028333 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.65, - "acc_stderr": 0.04793724854411021, - "acc_norm": 0.65, - "acc_norm_stderr": 0.04793724854411021 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.5185185185185185, - "acc_stderr": 0.04830366024635331, - "acc_norm": 0.5185185185185185, - "acc_norm_stderr": 0.04830366024635331 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.35467980295566504, - "acc_stderr": 0.03366124489051448, - "acc_norm": 0.35467980295566504, - "acc_norm_stderr": 0.03366124489051448 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.44516129032258067, - "acc_stderr": 0.028272410186214906, - "acc_norm": 0.44516129032258067, - "acc_norm_stderr": 0.028272410186214906 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.6965811965811965, - "acc_stderr": 0.030118210106942656, - "acc_norm": 0.6965811965811965, - "acc_norm_stderr": 0.030118210106942656 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.44150943396226416, - "acc_stderr": 0.030561590426731833, - "acc_norm": 0.44150943396226416, - "acc_norm_stderr": 0.030561590426731833 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.5181818181818182, - "acc_stderr": 0.04785964010794916, - "acc_norm": 0.5181818181818182, - "acc_norm_stderr": 0.04785964010794916 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.02742001935094527, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.02742001935094527 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.31125827814569534, - "acc_stderr": 0.03780445850526733, - "acc_norm": 0.31125827814569534, - "acc_norm_stderr": 0.03780445850526733 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.582089552238806, - "acc_stderr": 0.034875586404620636, - "acc_norm": 0.582089552238806, - "acc_norm_stderr": 0.034875586404620636 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3236994219653179, - "acc_stderr": 0.0356760379963917, - "acc_norm": 0.3236994219653179, - "acc_norm_stderr": 0.0356760379963917 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.3783068783068783, - "acc_stderr": 0.024976954053155247, - "acc_norm": 0.3783068783068783, - "acc_norm_stderr": 0.024976954053155247 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.04076663253918567, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 
0.04076663253918567 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.57, - "acc_stderr": 0.04975698519562426, - "acc_norm": 0.57, - "acc_norm_stderr": 0.04975698519562426 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.5028901734104047, - "acc_stderr": 0.026918645383239015, - "acc_norm": 0.5028901734104047, - "acc_norm_stderr": 0.026918645383239015 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.5153374233128835, - "acc_stderr": 0.03926522378708843, - "acc_norm": 0.5153374233128835, - "acc_norm_stderr": 0.03926522378708843 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4691358024691358, - "acc_stderr": 0.027767689606833942, - "acc_norm": 0.4691358024691358, - "acc_norm_stderr": 0.027767689606833942 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.5181347150259067, - "acc_stderr": 0.036060650018329185, - "acc_norm": 0.5181347150259067, - "acc_norm_stderr": 0.036060650018329185 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.3333333333333333, - "acc_stderr": 0.044346007015849245, - "acc_norm": 0.3333333333333333, - "acc_norm_stderr": 0.044346007015849245 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.4954128440366973, - "acc_stderr": 0.021436420955529424, - "acc_norm": 0.4954128440366973, - "acc_norm_stderr": 0.021436420955529424 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.3968253968253968, - "acc_stderr": 0.04375888492727061, - "acc_norm": 0.3968253968253968, - "acc_norm_stderr": 0.04375888492727061 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.48366013071895425, - "acc_stderr": 0.028614624752805407, - "acc_norm": 0.48366013071895425, - "acc_norm_stderr": 0.028614624752805407 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956911, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956911 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6115702479338843, - "acc_stderr": 0.04449270350068383, - "acc_norm": 0.6115702479338843, - "acc_norm_stderr": 0.04449270350068383 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.40131578947368424, - "acc_stderr": 0.03988903703336285, - "acc_norm": 0.40131578947368424, - "acc_norm_stderr": 0.03988903703336285 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.42320261437908496, - "acc_stderr": 0.019987809769482064, - "acc_norm": 0.42320261437908496, - "acc_norm_stderr": 0.019987809769482064 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.35815602836879434, - "acc_stderr": 0.028602085862759412, - "acc_norm": 0.35815602836879434, - "acc_norm_stderr": 0.028602085862759412 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4375, - "acc_stderr": 0.04708567521880525, - "acc_norm": 0.4375, - "acc_norm_stderr": 0.04708567521880525 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.35185185185185186, - "acc_stderr": 0.032568505702936464, - "acc_norm": 0.35185185185185186, - "acc_norm_stderr": 0.032568505702936464 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2223463687150838, - "acc_stderr": 0.013907189208156881, - "acc_norm": 0.2223463687150838, - "acc_norm_stderr": 0.013907189208156881 - }, - 
"harness|ko_mmlu_college_computer_science|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.63, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.63, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.34191176470588236, - "acc_stderr": 0.028814722422254184, - "acc_norm": 0.34191176470588236, - "acc_norm_stderr": 0.028814722422254184 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.5265306122448979, - "acc_stderr": 0.03196412734523272, - "acc_norm": 0.5265306122448979, - "acc_norm_stderr": 0.03196412734523272 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.569620253164557, - "acc_stderr": 0.03223017195937599, - "acc_norm": 0.569620253164557, - "acc_norm_stderr": 0.03223017195937599 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.34485006518904826, - "acc_stderr": 0.01213988100628706, - "acc_norm": 0.34485006518904826, - "acc_norm_stderr": 0.01213988100628706 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.4411764705882353, - "acc_stderr": 0.034849415144292316, - "acc_norm": 0.4411764705882353, - "acc_norm_stderr": 0.034849415144292316 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4666666666666667, - "acc_stderr": 0.03895658065271846, - "acc_norm": 0.4666666666666667, - "acc_norm_stderr": 0.03895658065271846 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.31334149326805383, - "mc1_stderr": 0.01623806506905961, - "mc2": 0.4910419282897384, - "mc2_stderr": 0.015891313216487672 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4647887323943662, - "acc_stderr": 0.017097225489196715, - "acc_norm": 0.4894366197183099, - "acc_norm_stderr": 0.01713595374322079 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "teknium/OpenHermes-2-Mistral-7B", - "model_sha": "fab9e80a3117907a9264db4aa88b0cfd8798ec9d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json b/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json deleted file mode 100644 index 5d4d88256b3c3c22e943a78b295f10b31cdccb29..0000000000000000000000000000000000000000 --- a/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.25853242320819114, - "acc_stderr": 0.012794553754288677, - "acc_norm": 0.31569965870307165, - "acc_norm_stderr": 0.013582571095815293 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3498307110137423, - "acc_stderr": 0.004759416464201141, - "acc_norm": 0.43178649671380204, - "acc_norm_stderr": 0.0049431275832905125 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.15789473684210525, - "acc_stderr": 0.027966785859160872, - "acc_norm": 0.15789473684210525, - "acc_norm_stderr": 0.027966785859160872 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.34951456310679613, - "acc_stderr": 0.047211885060971716, - "acc_norm": 0.34951456310679613, - "acc_norm_stderr": 0.047211885060971716 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.20689655172413793, - "acc_stderr": 0.014485656041669164, - "acc_norm": 0.20689655172413793, - "acc_norm_stderr": 0.014485656041669164 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.13333333333333333, - "acc_stderr": 0.029365879728106822, - "acc_norm": 0.13333333333333333, - "acc_norm_stderr": 0.029365879728106822 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2425531914893617, - "acc_stderr": 0.028020226271200217, - "acc_norm": 0.2425531914893617, - "acc_norm_stderr": 0.028020226271200217 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.21084337349397592, - "acc_stderr": 0.03175554786629919, - "acc_norm": 0.21084337349397592, - "acc_norm_stderr": 0.03175554786629919 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.24437299035369775, - "acc_stderr": 0.024406162094668882, - "acc_norm": 
0.24437299035369775, - "acc_norm_stderr": 0.024406162094668882 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.2242152466367713, - "acc_stderr": 0.027991534258519527, - "acc_norm": 0.2242152466367713, - "acc_norm_stderr": 0.027991534258519527 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.31297709923664124, - "acc_stderr": 0.04066962905677697, - "acc_norm": 0.31297709923664124, - "acc_norm_stderr": 0.04066962905677697 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.26, - "acc_stderr": 0.04408440022768077, - "acc_norm": 0.26, - "acc_norm_stderr": 0.04408440022768077 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.23737373737373738, - "acc_stderr": 0.030313710538198885, - "acc_norm": 0.23737373737373738, - "acc_norm_stderr": 0.030313710538198885 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.2482758620689655, - "acc_stderr": 0.03600105692727772, - "acc_norm": 0.2482758620689655, - "acc_norm_stderr": 0.03600105692727772 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.04835503696107223, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.04835503696107223 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.31932773109243695, - "acc_stderr": 0.0302839955258844, - "acc_norm": 0.31932773109243695, - "acc_norm_stderr": 0.0302839955258844 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.35384615384615387, - "acc_stderr": 0.02424378399406217, - "acc_norm": 0.35384615384615387, - "acc_norm_stderr": 0.02424378399406217 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.17, - "acc_stderr": 0.03775251680686371, - "acc_norm": 0.17, - "acc_norm_stderr": 0.03775251680686371 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.040191074725573483, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.040191074725573483 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2561576354679803, - "acc_stderr": 0.0307127300709826, - "acc_norm": 0.2561576354679803, - "acc_norm_stderr": 0.0307127300709826 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.3, - "acc_stderr": 0.026069362295335137, - "acc_norm": 0.3, - "acc_norm_stderr": 0.026069362295335137 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.19230769230769232, - "acc_stderr": 0.02581923325648373, - "acc_norm": 0.19230769230769232, - "acc_norm_stderr": 0.02581923325648373 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2792452830188679, - "acc_stderr": 0.027611163402399715, - "acc_norm": 0.2792452830188679, - "acc_norm_stderr": 0.027611163402399715 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.24545454545454545, - "acc_stderr": 0.041220665028782834, - "acc_norm": 0.24545454545454545, - "acc_norm_stderr": 0.041220665028782834 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.22592592592592592, - "acc_stderr": 0.025497532639609542, - "acc_norm": 0.22592592592592592, - "acc_norm_stderr": 0.025497532639609542 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.24503311258278146, - "acc_stderr": 0.035118075718047245, - "acc_norm": 0.24503311258278146, - "acc_norm_stderr": 0.035118075718047245 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2736318407960199, - "acc_stderr": 0.03152439186555401, - "acc_norm": 0.2736318407960199, - "acc_norm_stderr": 
0.03152439186555401 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.27167630057803466, - "acc_stderr": 0.03391750322321659, - "acc_norm": 0.27167630057803466, - "acc_norm_stderr": 0.03391750322321659 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.18518518518518517, - "acc_stderr": 0.020006075494524416, - "acc_norm": 0.18518518518518517, - "acc_norm_stderr": 0.020006075494524416 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.25, - "acc_stderr": 0.03621034121889507, - "acc_norm": 0.25, - "acc_norm_stderr": 0.03621034121889507 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.35, - "acc_stderr": 0.047937248544110196, - "acc_norm": 0.35, - "acc_norm_stderr": 0.047937248544110196 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2138728323699422, - "acc_stderr": 0.022075709251757177, - "acc_norm": 0.2138728323699422, - "acc_norm_stderr": 0.022075709251757177 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.25766871165644173, - "acc_stderr": 0.03436150827846917, - "acc_norm": 0.25766871165644173, - "acc_norm_stderr": 0.03436150827846917 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.22530864197530864, - "acc_stderr": 0.02324620264781975, - "acc_norm": 0.22530864197530864, - "acc_norm_stderr": 0.02324620264781975 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542127, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542127 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.34196891191709844, - "acc_stderr": 0.03423465100104284, - "acc_norm": 0.34196891191709844, - "acc_norm_stderr": 0.03423465100104284 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.30825688073394497, - "acc_stderr": 0.019798366698367268, - "acc_norm": 0.30825688073394497, - "acc_norm_stderr": 0.019798366698367268 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.04040610178208841, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.04040610178208841 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.30392156862745096, - "acc_stderr": 0.026336613469046633, - "acc_norm": 0.30392156862745096, - "acc_norm_stderr": 0.026336613469046633 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.21487603305785125, - "acc_stderr": 0.03749492448709698, - "acc_norm": 0.21487603305785125, - "acc_norm_stderr": 0.03749492448709698 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.34210526315789475, - "acc_stderr": 0.03860731599316091, - "acc_norm": 0.34210526315789475, - "acc_norm_stderr": 0.03860731599316091 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.21405228758169934, - "acc_stderr": 0.01659342966232903, - "acc_norm": 0.21405228758169934, - "acc_norm_stderr": 0.01659342966232903 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.22340425531914893, - "acc_stderr": 0.024847921358063962, - "acc_norm": 0.22340425531914893, - "acc_norm_stderr": 0.024847921358063962 - }, - "harness|ko_mmlu_machine_learning|5": 
{ - "acc": 0.16964285714285715, - "acc_stderr": 0.03562367850095391, - "acc_norm": 0.16964285714285715, - "acc_norm_stderr": 0.03562367850095391 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.39351851851851855, - "acc_stderr": 0.03331747876370312, - "acc_norm": 0.39351851851851855, - "acc_norm_stderr": 0.03331747876370312 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.26033519553072626, - "acc_stderr": 0.014676252009319464, - "acc_norm": 0.26033519553072626, - "acc_norm_stderr": 0.014676252009319464 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932269, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932269 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.4522058823529412, - "acc_stderr": 0.030233758551596452, - "acc_norm": 0.4522058823529412, - "acc_norm_stderr": 0.030233758551596452 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.3224489795918367, - "acc_stderr": 0.02992310056368391, - "acc_norm": 0.3224489795918367, - "acc_norm_stderr": 0.02992310056368391 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.25738396624472576, - "acc_stderr": 0.028458820991460295, - "acc_norm": 0.25738396624472576, - "acc_norm_stderr": 0.028458820991460295 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.22685788787483702, - "acc_stderr": 0.01069634813356993, - "acc_norm": 0.22685788787483702, - "acc_norm_stderr": 0.01069634813356993 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.25980392156862747, - "acc_stderr": 0.030778554678693247, - "acc_norm": 0.25980392156862747, - "acc_norm_stderr": 0.030778554678693247 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.03453131801885415, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.03453131801885415 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.22643818849449204, - "mc1_stderr": 0.014651337324602602, - "mc2": 0.40483124709618634, - "mc2_stderr": 0.015674854564799542 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.2828638497652582, - "acc_stderr": 0.015439198524236338, - "acc_norm": 0.3427230046948357, - "acc_norm_stderr": 0.016269756922059894 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - 
"harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "tlphams/gollm-instruct-all-in-one-v1", - "model_sha": "44937fddb3168a387b55173371b365a0b280ae3e", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/tlphams/gollm-tendency-45/result_2023-10-04 14:27:01.json b/tlphams/gollm-tendency-45/result_2023-10-04 14:27:01.json deleted file mode 100644 index 7d68392b2e47b05410f05aad5455f34a51524566..0000000000000000000000000000000000000000 --- a/tlphams/gollm-tendency-45/result_2023-10-04 14:27:01.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2986348122866894, - "acc_stderr": 0.01337407861506875, - "acc_norm": 0.34982935153583616, - "acc_norm_stderr": 0.013936809212158292 - }, - "harness|ko_hellaswag|10": { - "acc": 0.40948018323043217, - "acc_stderr": 0.004907329270272704, - "acc_norm": 0.5320653256323441, - "acc_norm_stderr": 0.004979510001776618 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.2807017543859649, - "acc_stderr": 0.03446296217088426, - "acc_norm": 0.2807017543859649, - "acc_norm_stderr": 0.03446296217088426 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.24271844660194175, - "acc_stderr": 0.04245022486384495, - "acc_norm": 0.24271844660194175, - "acc_norm_stderr": 0.04245022486384495 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.26309067688378035, - "acc_stderr": 0.01574549716904905, - "acc_norm": 0.26309067688378035, - "acc_norm_stderr": 0.01574549716904905 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.32592592592592595, - "acc_stderr": 0.040491220417025055, - "acc_norm": 0.32592592592592595, - "acc_norm_stderr": 0.040491220417025055 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - 
"harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2, - "acc_stderr": 0.026148818018424495, - "acc_norm": 0.2, - "acc_norm_stderr": 0.026148818018424495 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.19879518072289157, - "acc_stderr": 0.031069390260789413, - "acc_norm": 0.19879518072289157, - "acc_norm_stderr": 0.031069390260789413 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.28938906752411575, - "acc_stderr": 0.025755865922632938, - "acc_norm": 0.28938906752411575, - "acc_norm_stderr": 0.025755865922632938 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.15246636771300448, - "acc_stderr": 0.02412620481325287, - "acc_norm": 0.15246636771300448, - "acc_norm_stderr": 0.02412620481325287 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3053435114503817, - "acc_stderr": 0.04039314978724561, - "acc_norm": 0.3053435114503817, - "acc_norm_stderr": 0.04039314978724561 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.26262626262626265, - "acc_stderr": 0.031353050095330834, - "acc_norm": 0.26262626262626265, - "acc_norm_stderr": 0.031353050095330834 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.32413793103448274, - "acc_stderr": 0.03900432069185555, - "acc_norm": 0.32413793103448274, - "acc_norm_stderr": 0.03900432069185555 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.04220773659171451, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.04220773659171451 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.27310924369747897, - "acc_stderr": 0.02894200404099817, - "acc_norm": 0.27310924369747897, - "acc_norm_stderr": 0.02894200404099817 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.25384615384615383, - "acc_stderr": 0.022066054378726257, - "acc_norm": 0.25384615384615383, - "acc_norm_stderr": 0.022066054378726257 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.24630541871921183, - "acc_stderr": 0.03031509928561773, - "acc_norm": 0.24630541871921183, - "acc_norm_stderr": 0.03031509928561773 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.2806451612903226, - "acc_stderr": 0.025560604721022888, - "acc_norm": 0.2806451612903226, - "acc_norm_stderr": 0.025560604721022888 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.2692307692307692, - "acc_stderr": 0.02905858830374884, - "acc_norm": 0.2692307692307692, - "acc_norm_stderr": 0.02905858830374884 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2528301886792453, - "acc_stderr": 0.026749899771241238, - "acc_norm": 0.2528301886792453, - "acc_norm_stderr": 0.026749899771241238 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.2636363636363636, - "acc_stderr": 0.04220224692971987, - "acc_norm": 0.2636363636363636, - "acc_norm_stderr": 0.04220224692971987 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - 
"acc": 0.24444444444444444, - "acc_stderr": 0.02620276653465215, - "acc_norm": 0.24444444444444444, - "acc_norm_stderr": 0.02620276653465215 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.037345356767871984, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.037345356767871984 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.21393034825870647, - "acc_stderr": 0.028996909693328923, - "acc_norm": 0.21393034825870647, - "acc_norm_stderr": 0.028996909693328923 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3063583815028902, - "acc_stderr": 0.03514942551267438, - "acc_norm": 0.3063583815028902, - "acc_norm_stderr": 0.03514942551267438 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2724867724867725, - "acc_stderr": 0.022930973071633345, - "acc_norm": 0.2724867724867725, - "acc_norm_stderr": 0.022930973071633345 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2638888888888889, - "acc_stderr": 0.03685651095897532, - "acc_norm": 0.2638888888888889, - "acc_norm_stderr": 0.03685651095897532 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.22832369942196531, - "acc_stderr": 0.022598703804321624, - "acc_norm": 0.22832369942196531, - "acc_norm_stderr": 0.022598703804321624 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.294478527607362, - "acc_stderr": 0.03581165790474082, - "acc_norm": 0.294478527607362, - "acc_norm_stderr": 0.03581165790474082 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.2839506172839506, - "acc_stderr": 0.02508947852376513, - "acc_norm": 0.2839506172839506, - "acc_norm_stderr": 0.02508947852376513 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.2694300518134715, - "acc_stderr": 0.03201867122877794, - "acc_norm": 0.2694300518134715, - "acc_norm_stderr": 0.03201867122877794 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.040969851398436695, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.040969851398436695 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.29724770642201837, - "acc_stderr": 0.019595707224643533, - "acc_norm": 0.29724770642201837, - "acc_norm_stderr": 0.019595707224643533 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.15079365079365079, - "acc_stderr": 0.03200686497287392, - "acc_norm": 0.15079365079365079, - "acc_norm_stderr": 0.03200686497287392 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.025738854797818737, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.025738854797818737 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.23, - "acc_stderr": 0.04229525846816506, - "acc_norm": 0.23, - "acc_norm_stderr": 0.04229525846816506 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.38016528925619836, - "acc_stderr": 0.04431324501968431, - "acc_norm": 0.38016528925619836, - "acc_norm_stderr": 0.04431324501968431 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.28289473684210525, - "acc_stderr": 0.03665349695640767, 
- "acc_norm": 0.28289473684210525, - "acc_norm_stderr": 0.03665349695640767 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.28104575163398693, - "acc_stderr": 0.018185218954318082, - "acc_norm": 0.28104575163398693, - "acc_norm_stderr": 0.018185218954318082 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2695035460992908, - "acc_stderr": 0.02646903681859063, - "acc_norm": 0.2695035460992908, - "acc_norm_stderr": 0.02646903681859063 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.20535714285714285, - "acc_stderr": 0.03834241021419074, - "acc_norm": 0.20535714285714285, - "acc_norm_stderr": 0.03834241021419074 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.26851851851851855, - "acc_stderr": 0.030225226160012397, - "acc_norm": 0.26851851851851855, - "acc_norm_stderr": 0.030225226160012397 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.24804469273743016, - "acc_stderr": 0.014444157808261453, - "acc_norm": 0.24804469273743016, - "acc_norm_stderr": 0.014444157808261453 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.28, - "acc_stderr": 0.045126085985421276, - "acc_norm": 0.28, - "acc_norm_stderr": 0.045126085985421276 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.32, - "acc_stderr": 0.046882617226215034, - "acc_norm": 0.32, - "acc_norm_stderr": 0.046882617226215034 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3639705882352941, - "acc_stderr": 0.029227192460032025, - "acc_norm": 0.3639705882352941, - "acc_norm_stderr": 0.029227192460032025 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.24081632653061225, - "acc_stderr": 0.027372942201788163, - "acc_norm": 0.24081632653061225, - "acc_norm_stderr": 0.027372942201788163 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2911392405063291, - "acc_stderr": 0.029571601065753374, - "acc_norm": 0.2911392405063291, - "acc_norm_stderr": 0.029571601065753374 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.24902216427640156, - "acc_stderr": 0.01104489226404077, - "acc_norm": 0.24902216427640156, - "acc_norm_stderr": 0.01104489226404077 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.2647058823529412, - "acc_stderr": 0.0309645179269234, - "acc_norm": 0.2647058823529412, - "acc_norm_stderr": 0.0309645179269234 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.24848484848484848, - "acc_stderr": 0.03374402644139404, - "acc_norm": 0.24848484848484848, - "acc_norm_stderr": 0.03374402644139404 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2558139534883721, - "mc1_stderr": 0.015274176219283349, - "mc2": 0.421210061474517, - "mc2_stderr": 0.01567952563537008 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.3931924882629108, - "acc_stderr": 0.016744157492949278, - "acc_norm": 0.4636150234741784, - "acc_norm_stderr": 0.01709433745632628 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - 
"harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "tlphams/gollm-tendency-45", - "model_sha": "a8f12582eb5700e1d45d045df0c38e79364e0b0c", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/upstage/llama-30b-instruct-2048/result_2023-10-17 17:42:28.json b/upstage/llama-30b-instruct-2048/result_2023-10-17 17:42:28.json deleted file mode 100644 index 0fdd54ef1ef7521b6c5170ed95f53cf220d99622..0000000000000000000000000000000000000000 --- a/upstage/llama-30b-instruct-2048/result_2023-10-17 17:42:28.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2687713310580205, - "acc_stderr": 0.012955065963710677, - "acc_norm": 0.30204778156996587, - "acc_norm_stderr": 0.013417519144716417 - }, - "harness|ko_hellaswag|10": { - "acc": 0.326229834694284, - "acc_stderr": 0.004678743563766653, - "acc_norm": 0.39474208325034854, - "acc_norm_stderr": 0.004877962644991874 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.42105263157894735, - "acc_stderr": 0.03786720706234215, - "acc_norm": 0.42105263157894735, - "acc_norm_stderr": 0.03786720706234215 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3786407766990291, - "acc_stderr": 0.048026946982589726, - "acc_norm": 0.3786407766990291, - "acc_norm_stderr": 0.048026946982589726 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 
0.4086845466155811, - "acc_stderr": 0.01757925014815341, - "acc_norm": 0.4086845466155811, - "acc_norm_stderr": 0.01757925014815341 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.4148148148148148, - "acc_stderr": 0.04256193767901408, - "acc_norm": 0.4148148148148148, - "acc_norm_stderr": 0.04256193767901408 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.36, - "acc_stderr": 0.048241815132442176, - "acc_norm": 0.36, - "acc_norm_stderr": 0.048241815132442176 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2680851063829787, - "acc_stderr": 0.028957342788342343, - "acc_norm": 0.2680851063829787, - "acc_norm_stderr": 0.028957342788342343 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2891566265060241, - "acc_stderr": 0.03529486801511115, - "acc_norm": 0.2891566265060241, - "acc_norm_stderr": 0.03529486801511115 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3762057877813505, - "acc_stderr": 0.02751392568354943, - "acc_norm": 0.3762057877813505, - "acc_norm_stderr": 0.02751392568354943 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.3273542600896861, - "acc_stderr": 0.03149384670994131, - "acc_norm": 0.3273542600896861, - "acc_norm_stderr": 0.03149384670994131 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.45038167938931295, - "acc_stderr": 0.04363643698524779, - "acc_norm": 0.45038167938931295, - "acc_norm_stderr": 0.04363643698524779 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.35, - "acc_stderr": 0.04793724854411022, - "acc_norm": 0.35, - "acc_norm_stderr": 0.04793724854411022 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3838383838383838, - "acc_stderr": 0.034648816750163375, - "acc_norm": 0.3838383838383838, - "acc_norm_stderr": 0.034648816750163375 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3931034482758621, - "acc_stderr": 0.040703290137070705, - "acc_norm": 0.3931034482758621, - "acc_norm_stderr": 0.040703290137070705 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.24509803921568626, - "acc_stderr": 0.04280105837364395, - "acc_norm": 0.24509803921568626, - "acc_norm_stderr": 0.04280105837364395 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3739495798319328, - "acc_stderr": 0.031429466378837076, - "acc_norm": 0.3739495798319328, - "acc_norm_stderr": 0.031429466378837076 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.3923076923076923, - "acc_stderr": 0.024756000382130945, - "acc_norm": 0.3923076923076923, - "acc_norm_stderr": 0.024756000382130945 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.41, - "acc_stderr": 0.04943110704237102, - "acc_norm": 0.41, - "acc_norm_stderr": 0.04943110704237102 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.047128212574267705, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.047128212574267705 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.3448275862068966, - "acc_stderr": 0.03344283744280458, - "acc_norm": 0.3448275862068966, - "acc_norm_stderr": 0.03344283744280458 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.38064516129032255, - "acc_stderr": 0.027621717832907032, - "acc_norm": 0.38064516129032255, - "acc_norm_stderr": 0.027621717832907032 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.5598290598290598, - "acc_stderr": 0.0325207417206305, - "acc_norm": 
0.5598290598290598, - "acc_norm_stderr": 0.0325207417206305 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.35471698113207545, - "acc_stderr": 0.0294451753281996, - "acc_norm": 0.35471698113207545, - "acc_norm_stderr": 0.0294451753281996 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4090909090909091, - "acc_stderr": 0.04709306978661896, - "acc_norm": 0.4090909090909091, - "acc_norm_stderr": 0.04709306978661896 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.29259259259259257, - "acc_stderr": 0.027738969632176088, - "acc_norm": 0.29259259259259257, - "acc_norm_stderr": 0.027738969632176088 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2913907284768212, - "acc_stderr": 0.03710185726119994, - "acc_norm": 0.2913907284768212, - "acc_norm_stderr": 0.03710185726119994 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.3781094527363184, - "acc_stderr": 0.034288678487786564, - "acc_norm": 0.3781094527363184, - "acc_norm_stderr": 0.034288678487786564 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.3583815028901734, - "acc_stderr": 0.03656343653353159, - "acc_norm": 0.3583815028901734, - "acc_norm_stderr": 0.03656343653353159 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.29894179894179895, - "acc_stderr": 0.023577604791655812, - "acc_norm": 0.29894179894179895, - "acc_norm_stderr": 0.023577604791655812 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.03773809990686934, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.03773809990686934 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.04560480215720684, - "acc_norm": 0.29, - "acc_norm_stderr": 0.04560480215720684 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.53, - "acc_stderr": 0.050161355804659205, - "acc_norm": 0.53, - "acc_norm_stderr": 0.050161355804659205 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.4595375722543353, - "acc_stderr": 0.026830805998952233, - "acc_norm": 0.4595375722543353, - "acc_norm_stderr": 0.026830805998952233 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.32515337423312884, - "acc_stderr": 0.03680350371286461, - "acc_norm": 0.32515337423312884, - "acc_norm_stderr": 0.03680350371286461 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.39197530864197533, - "acc_stderr": 0.027163686038271247, - "acc_norm": 0.39197530864197533, - "acc_norm_stderr": 0.027163686038271247 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.33, - "acc_stderr": 0.04725815626252605, - "acc_norm": 0.33, - "acc_norm_stderr": 0.04725815626252605 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.40414507772020725, - "acc_stderr": 0.03541508578884021, - "acc_norm": 0.40414507772020725, - "acc_norm_stderr": 0.03541508578884021 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.2543859649122807, - "acc_stderr": 0.0409698513984367, - "acc_norm": 0.2543859649122807, - "acc_norm_stderr": 0.0409698513984367 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.3376146788990826, - "acc_stderr": 0.020275265986638903, - "acc_norm": 0.3376146788990826, - "acc_norm_stderr": 0.020275265986638903 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.2619047619047619, - "acc_stderr": 0.03932537680392868, - "acc_norm": 0.2619047619047619, - "acc_norm_stderr": 0.03932537680392868 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3464052287581699, - "acc_stderr": 0.027245613047215362, - "acc_norm": 0.3464052287581699, - 
"acc_norm_stderr": 0.027245613047215362 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5867768595041323, - "acc_stderr": 0.04495087843548408, - "acc_norm": 0.5867768595041323, - "acc_norm_stderr": 0.04495087843548408 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3026315789473684, - "acc_stderr": 0.03738520676119668, - "acc_norm": 0.3026315789473684, - "acc_norm_stderr": 0.03738520676119668 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.30718954248366015, - "acc_stderr": 0.018663359671463667, - "acc_norm": 0.30718954248366015, - "acc_norm_stderr": 0.018663359671463667 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3546099290780142, - "acc_stderr": 0.028538650028878638, - "acc_norm": 0.3546099290780142, - "acc_norm_stderr": 0.028538650028878638 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.2767857142857143, - "acc_stderr": 0.042466243366976235, - "acc_norm": 0.2767857142857143, - "acc_norm_stderr": 0.042466243366976235 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.375, - "acc_stderr": 0.033016908987210894, - "acc_norm": 0.375, - "acc_norm_stderr": 0.033016908987210894 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2324022346368715, - "acc_stderr": 0.014125968754673387, - "acc_norm": 0.2324022346368715, - "acc_norm_stderr": 0.014125968754673387 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.37, - "acc_stderr": 0.04852365870939099, - "acc_norm": 0.37, - "acc_norm_stderr": 0.04852365870939099 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.4, - "acc_stderr": 0.04923659639173309, - "acc_norm": 0.4, - "acc_norm_stderr": 0.04923659639173309 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3639705882352941, - "acc_stderr": 0.029227192460032025, - "acc_norm": 0.3639705882352941, - "acc_norm_stderr": 0.029227192460032025 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39183673469387753, - "acc_stderr": 0.03125127591089165, - "acc_norm": 0.39183673469387753, - "acc_norm_stderr": 0.03125127591089165 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.3670886075949367, - "acc_stderr": 0.03137624072561618, - "acc_norm": 0.3670886075949367, - "acc_norm_stderr": 0.03137624072561618 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2848761408083442, - "acc_stderr": 0.01152783084636901, - "acc_norm": 0.2848761408083442, - "acc_norm_stderr": 0.01152783084636901 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3627450980392157, - "acc_stderr": 0.03374499356319355, - "acc_norm": 0.3627450980392157, - "acc_norm_stderr": 0.03374499356319355 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3151515151515151, - "acc_stderr": 0.0362773057502241, - "acc_norm": 0.3151515151515151, - "acc_norm_stderr": 0.0362773057502241 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.3011015911872705, - "mc1_stderr": 0.01605899902610062, - "mc2": 0.4721970945475926, - "mc2_stderr": 0.015907996303038192 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.26291079812206575, - "acc_stderr": 0.01509035498507774, - "acc_norm": 0.30633802816901406, - "acc_norm_stderr": 0.015801911286714727 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - 
"harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "upstage/llama-30b-instruct-2048", - "model_sha": "9f246be5c6c08bf397ff7b42aa5fe91d011d9ace", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json b/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json deleted file mode 100644 index 4ee56efffed0dd9c9b4495988445214f56e4b7ea..0000000000000000000000000000000000000000 --- a/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2781569965870307, - "acc_stderr": 0.0130944699195388, - "acc_norm": 0.30631399317406144, - "acc_norm_stderr": 0.013470584417276511 - }, - "harness|ko_hellaswag|10": { - "acc": 0.33320055765783707, - "acc_stderr": 0.004703942346762255, - "acc_norm": 0.3875721967735511, - "acc_norm_stderr": 
0.004862003566798538 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.4444444444444444, - "acc_stderr": 0.038110796698335316, - "acc_norm": 0.4444444444444444, - "acc_norm_stderr": 0.038110796698335316 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.5242718446601942, - "acc_stderr": 0.049449010929737795, - "acc_norm": 0.5242718446601942, - "acc_norm_stderr": 0.049449010929737795 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.45721583652618136, - "acc_stderr": 0.01781438523853443, - "acc_norm": 0.45721583652618136, - "acc_norm_stderr": 0.01781438523853443 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.362962962962963, - "acc_stderr": 0.04153948404742398, - "acc_norm": 0.362962962962963, - "acc_norm_stderr": 0.04153948404742398 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3829787234042553, - "acc_stderr": 0.03177821250236922, - "acc_norm": 0.3829787234042553, - "acc_norm_stderr": 0.03177821250236922 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.35542168674698793, - "acc_stderr": 0.03726214354322415, - "acc_norm": 0.35542168674698793, - "acc_norm_stderr": 0.03726214354322415 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.48231511254019294, - "acc_stderr": 0.02838032284907713, - "acc_norm": 0.48231511254019294, - "acc_norm_stderr": 0.02838032284907713 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.4439461883408072, - "acc_stderr": 0.03334625674242728, - "acc_norm": 0.4439461883408072, - "acc_norm_stderr": 0.03334625674242728 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.3816793893129771, - "acc_stderr": 0.0426073515764456, - "acc_norm": 0.3816793893129771, - "acc_norm_stderr": 0.0426073515764456 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.41, - "acc_stderr": 0.049431107042371025, - "acc_norm": 0.41, - "acc_norm_stderr": 0.049431107042371025 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.43434343434343436, - "acc_stderr": 0.03531505879359183, - "acc_norm": 0.43434343434343436, - "acc_norm_stderr": 0.03531505879359183 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.4413793103448276, - "acc_stderr": 0.04137931034482758, - "acc_norm": 0.4413793103448276, - "acc_norm_stderr": 0.04137931034482758 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.2549019607843137, - "acc_stderr": 0.043364327079931785, - "acc_norm": 0.2549019607843137, - "acc_norm_stderr": 0.043364327079931785 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.453781512605042, - "acc_stderr": 0.03233943468182088, - "acc_norm": 0.453781512605042, - "acc_norm_stderr": 0.03233943468182088 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.47435897435897434, - "acc_stderr": 0.02531764972644865, - "acc_norm": 0.47435897435897434, - "acc_norm_stderr": 0.02531764972644865 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.68, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.68, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.25, - "acc_stderr": 0.04351941398892446, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04351941398892446 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.4351851851851852, - "acc_stderr": 0.04792898170907061, - "acc_norm": 0.4351851851851852, - "acc_norm_stderr": 0.04792898170907061 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 
0.4187192118226601, - "acc_stderr": 0.03471192860518468, - "acc_norm": 0.4187192118226601, - "acc_norm_stderr": 0.03471192860518468 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.42258064516129035, - "acc_stderr": 0.02810096472427264, - "acc_norm": 0.42258064516129035, - "acc_norm_stderr": 0.02810096472427264 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.7307692307692307, - "acc_stderr": 0.029058588303748842, - "acc_norm": 0.7307692307692307, - "acc_norm_stderr": 0.029058588303748842 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.4188679245283019, - "acc_stderr": 0.03036505082911522, - "acc_norm": 0.4188679245283019, - "acc_norm_stderr": 0.03036505082911522 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.4727272727272727, - "acc_stderr": 0.04782001791380063, - "acc_norm": 0.4727272727272727, - "acc_norm_stderr": 0.04782001791380063 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.02696242432507383, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.02696242432507383 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.26490066225165565, - "acc_stderr": 0.03603038545360384, - "acc_norm": 0.26490066225165565, - "acc_norm_stderr": 0.03603038545360384 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.5970149253731343, - "acc_stderr": 0.034683432951111266, - "acc_norm": 0.5970149253731343, - "acc_norm_stderr": 0.034683432951111266 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.36416184971098264, - "acc_stderr": 0.03669072477416907, - "acc_norm": 0.36416184971098264, - "acc_norm_stderr": 0.03669072477416907 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.36243386243386244, - "acc_stderr": 0.024757473902752045, - "acc_norm": 0.36243386243386244, - "acc_norm_stderr": 0.024757473902752045 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.3125, - "acc_stderr": 0.038760854559127644, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.038760854559127644 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.65, - "acc_stderr": 0.047937248544110175, - "acc_norm": 0.65, - "acc_norm_stderr": 0.047937248544110175 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.49421965317919075, - "acc_stderr": 0.02691729617914911, - "acc_norm": 0.49421965317919075, - "acc_norm_stderr": 0.02691729617914911 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.4294478527607362, - "acc_stderr": 0.038890666191127216, - "acc_norm": 0.4294478527607362, - "acc_norm_stderr": 0.038890666191127216 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.4228395061728395, - "acc_stderr": 0.027487472980871598, - "acc_norm": 0.4228395061728395, - "acc_norm_stderr": 0.027487472980871598 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.27, - "acc_stderr": 0.044619604333847394, - "acc_norm": 0.27, - "acc_norm_stderr": 0.044619604333847394 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.46113989637305697, - "acc_stderr": 0.035975244117345775, - "acc_norm": 0.46113989637305697, - "acc_norm_stderr": 0.035975244117345775 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.30701754385964913, - "acc_stderr": 0.043391383225798594, - "acc_norm": 0.30701754385964913, - "acc_norm_stderr": 0.043391383225798594 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.43853211009174314, - 
"acc_stderr": 0.021274713073954562, - "acc_norm": 0.43853211009174314, - "acc_norm_stderr": 0.021274713073954562 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.30158730158730157, - "acc_stderr": 0.041049472699033945, - "acc_norm": 0.30158730158730157, - "acc_norm_stderr": 0.041049472699033945 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.3888888888888889, - "acc_stderr": 0.027914055510468, - "acc_norm": 0.3888888888888889, - "acc_norm_stderr": 0.027914055510468 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956912, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956912 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.6611570247933884, - "acc_stderr": 0.04320767807536669, - "acc_norm": 0.6611570247933884, - "acc_norm_stderr": 0.04320767807536669 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.39473684210526316, - "acc_stderr": 0.039777499346220734, - "acc_norm": 0.39473684210526316, - "acc_norm_stderr": 0.039777499346220734 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.4133986928104575, - "acc_stderr": 0.019922115682786696, - "acc_norm": 0.4133986928104575, - "acc_norm_stderr": 0.019922115682786696 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.3546099290780142, - "acc_stderr": 0.028538650028878648, - "acc_norm": 0.3546099290780142, - "acc_norm_stderr": 0.028538650028878648 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.4017857142857143, - "acc_stderr": 0.04653333146973646, - "acc_norm": 0.4017857142857143, - "acc_norm_stderr": 0.04653333146973646 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.3611111111111111, - "acc_stderr": 0.032757734861009996, - "acc_norm": 0.3611111111111111, - "acc_norm_stderr": 0.032757734861009996 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2994413407821229, - "acc_stderr": 0.015318257745976708, - "acc_norm": 0.2994413407821229, - "acc_norm_stderr": 0.015318257745976708 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.44, - "acc_stderr": 0.04988876515698589, - "acc_norm": 0.44, - "acc_norm_stderr": 0.04988876515698589 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.54, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.54, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.3272058823529412, - "acc_stderr": 0.028501452860396563, - "acc_norm": 0.3272058823529412, - "acc_norm_stderr": 0.028501452860396563 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.5443037974683544, - "acc_stderr": 0.03241920684693333, - "acc_norm": 0.5443037974683544, - "acc_norm_stderr": 0.03241920684693333 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.3376792698826597, - "acc_stderr": 0.012078563777145546, - "acc_norm": 0.3376792698826597, - "acc_norm_stderr": 0.012078563777145546 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.35294117647058826, - "acc_stderr": 0.033540924375915195, - "acc_norm": 0.35294117647058826, - "acc_norm_stderr": 0.033540924375915195 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.4303030303030303, - "acc_stderr": 0.03866225962879077, - "acc_norm": 0.4303030303030303, - "acc_norm_stderr": 0.03866225962879077 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 
0.2778457772337821, - "mc1_stderr": 0.015680929364024633, - "mc2": 0.45442787164664084, - "mc2_stderr": 0.016775457950621752 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.13849765258215962, - "acc_stderr": 0.011840898511903734, - "acc_norm": 0.1795774647887324, - "acc_norm_stderr": 0.013157698435457048 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "vihangd/smartyplats-7b-v1", - "model_sha": "9ebc095e0a14ebb77fa059582bf762f21bbcdddd", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json b/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json deleted file mode 100644 index 
8897c334b7857d24ae0f4d4f975ea56a8137d040..0000000000000000000000000000000000000000 --- a/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.20563139931740615, - "acc_stderr": 0.011810745260742585, - "acc_norm": 0.25853242320819114, - "acc_norm_stderr": 0.012794553754288666 - }, - "harness|ko_hellaswag|10": { - "acc": 0.252141007767377, - "acc_stderr": 0.004333543083293473, - "acc_norm": 0.24278032264489147, - "acc_norm_stderr": 0.004278871104930363 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.25146198830409355, - "acc_stderr": 0.033275044238468436, - "acc_norm": 0.25146198830409355, - "acc_norm_stderr": 0.033275044238468436 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.1941747572815534, - "acc_stderr": 0.03916667762822584, - "acc_norm": 0.1941747572815534, - "acc_norm_stderr": 0.03916667762822584 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.2720306513409962, - "acc_stderr": 0.015913367447500517, - "acc_norm": 0.2720306513409962, - "acc_norm_stderr": 0.015913367447500517 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.34074074074074073, - "acc_stderr": 0.040943762699967946, - "acc_norm": 0.34074074074074073, - "acc_norm_stderr": 0.040943762699967946 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2127659574468085, - "acc_stderr": 0.026754391348039787, - "acc_norm": 0.2127659574468085, - "acc_norm_stderr": 0.026754391348039787 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.20481927710843373, - "acc_stderr": 0.03141784291663925, - "acc_norm": 0.20481927710843373, - "acc_norm_stderr": 0.03141784291663925 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3086816720257235, - "acc_stderr": 0.026236965881153266, - "acc_norm": 0.3086816720257235, - "acc_norm_stderr": 0.026236965881153266 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.20179372197309417, - "acc_stderr": 0.026936111912802263, - "acc_norm": 0.20179372197309417, - "acc_norm_stderr": 0.026936111912802263 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.22900763358778625, - "acc_stderr": 0.036853466317118506, - "acc_norm": 0.22900763358778625, - "acc_norm_stderr": 0.036853466317118506 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.19, - "acc_stderr": 0.03942772444036623, - "acc_norm": 0.19, - "acc_norm_stderr": 0.03942772444036623 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.2474747474747475, - "acc_stderr": 0.030746300742124495, - "acc_norm": 0.2474747474747475, - "acc_norm_stderr": 0.030746300742124495 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.296551724137931, - "acc_stderr": 0.038061426873099935, - "acc_norm": 0.296551724137931, - "acc_norm_stderr": 0.038061426873099935 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.21568627450980393, - "acc_stderr": 0.040925639582376556, - "acc_norm": 0.21568627450980393, - "acc_norm_stderr": 0.040925639582376556 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.18067226890756302, - "acc_stderr": 0.024991964966600756, - "acc_norm": 0.18067226890756302, - "acc_norm_stderr": 0.024991964966600756 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.26666666666666666, - "acc_stderr": 0.022421273612923714, - "acc_norm": 0.26666666666666666, - "acc_norm_stderr": 0.022421273612923714 - }, - 
"harness|ko_mmlu_computer_security|5": { - "acc": 0.31, - "acc_stderr": 0.046482319871173156, - "acc_norm": 0.31, - "acc_norm_stderr": 0.046482319871173156 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.21296296296296297, - "acc_stderr": 0.03957835471980981, - "acc_norm": 0.21296296296296297, - "acc_norm_stderr": 0.03957835471980981 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.2857142857142857, - "acc_stderr": 0.03178529710642749, - "acc_norm": 0.2857142857142857, - "acc_norm_stderr": 0.03178529710642749 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.22258064516129034, - "acc_stderr": 0.02366421667164252, - "acc_norm": 0.22258064516129034, - "acc_norm_stderr": 0.02366421667164252 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.24786324786324787, - "acc_stderr": 0.028286324075564386, - "acc_norm": 0.24786324786324787, - "acc_norm_stderr": 0.028286324075564386 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.2188679245283019, - "acc_stderr": 0.025447863825108597, - "acc_norm": 0.2188679245283019, - "acc_norm_stderr": 0.025447863825108597 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.21818181818181817, - "acc_stderr": 0.03955932861795833, - "acc_norm": 0.21818181818181817, - "acc_norm_stderr": 0.03955932861795833 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.26296296296296295, - "acc_stderr": 0.026842057873833706, - "acc_norm": 0.26296296296296295, - "acc_norm_stderr": 0.026842057873833706 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2980132450331126, - "acc_stderr": 0.03734535676787198, - "acc_norm": 0.2980132450331126, - "acc_norm_stderr": 0.03734535676787198 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.2537313432835821, - "acc_stderr": 0.03076944496729602, - "acc_norm": 0.2537313432835821, - "acc_norm_stderr": 0.03076944496729602 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.24277456647398843, - "acc_stderr": 0.0326926380614177, - "acc_norm": 0.24277456647398843, - "acc_norm_stderr": 0.0326926380614177 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.24074074074074073, - "acc_stderr": 0.022019080012217897, - "acc_norm": 0.24074074074074073, - "acc_norm_stderr": 0.022019080012217897 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2569444444444444, - "acc_stderr": 0.03653946969442099, - "acc_norm": 0.2569444444444444, - "acc_norm_stderr": 0.03653946969442099 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.18, - "acc_stderr": 0.03861229196653694, - "acc_norm": 0.18, - "acc_norm_stderr": 0.03861229196653694 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.2745664739884393, - "acc_stderr": 0.024027745155265026, - "acc_norm": 0.2745664739884393, - "acc_norm_stderr": 0.024027745155265026 - }, - "harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2883435582822086, - "acc_stderr": 0.035590395316173425, - "acc_norm": 0.2883435582822086, - "acc_norm_stderr": 0.035590395316173425 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.27469135802469136, - "acc_stderr": 0.024836057868294688, - "acc_norm": 0.27469135802469136, - "acc_norm_stderr": 0.024836057868294688 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.22, - 
"acc_stderr": 0.041633319989322695, - "acc_norm": 0.22, - "acc_norm_stderr": 0.041633319989322695 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.20725388601036268, - "acc_stderr": 0.02925282329180363, - "acc_norm": 0.20725388601036268, - "acc_norm_stderr": 0.02925282329180363 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.03999423879281336, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.03999423879281336 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.20550458715596331, - "acc_stderr": 0.01732435232501601, - "acc_norm": 0.20550458715596331, - "acc_norm_stderr": 0.01732435232501601 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.21428571428571427, - "acc_stderr": 0.03670066451047181, - "acc_norm": 0.21428571428571427, - "acc_norm_stderr": 0.03670066451047181 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.23529411764705882, - "acc_stderr": 0.024288619466046095, - "acc_norm": 0.23529411764705882, - "acc_norm_stderr": 0.024288619466046095 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.22, - "acc_stderr": 0.04163331998932267, - "acc_norm": 0.22, - "acc_norm_stderr": 0.04163331998932267 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.3140495867768595, - "acc_stderr": 0.04236964753041019, - "acc_norm": 0.3140495867768595, - "acc_norm_stderr": 0.04236964753041019 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.29605263157894735, - "acc_stderr": 0.03715062154998905, - "acc_norm": 0.29605263157894735, - "acc_norm_stderr": 0.03715062154998905 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.27941176470588236, - "acc_stderr": 0.01815287105153882, - "acc_norm": 0.27941176470588236, - "acc_norm_stderr": 0.01815287105153882 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.26595744680851063, - "acc_stderr": 0.02635806569888059, - "acc_norm": 0.26595744680851063, - "acc_norm_stderr": 0.02635806569888059 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.25, - "acc_stderr": 0.04109974682633932, - "acc_norm": 0.25, - "acc_norm_stderr": 0.04109974682633932 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.2222222222222222, - "acc_stderr": 0.028353212866863445, - "acc_norm": 0.2222222222222222, - "acc_norm_stderr": 0.028353212866863445 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2446927374301676, - "acc_stderr": 0.014378169884098426, - "acc_norm": 0.2446927374301676, - "acc_norm_stderr": 0.014378169884098426 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.29, - "acc_stderr": 0.045604802157206845, - "acc_norm": 0.29, - "acc_norm_stderr": 0.045604802157206845 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.1875, - "acc_stderr": 0.023709788253811766, - "acc_norm": 0.1875, - "acc_norm_stderr": 0.023709788253811766 - }, - "harness|ko_mmlu_security_studies|5": { - "acc": 0.22040816326530613, - "acc_stderr": 0.026537045312145294, - "acc_norm": 0.22040816326530613, - "acc_norm_stderr": 0.026537045312145294 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.2742616033755274, - "acc_stderr": 0.029041333510598035, - "acc_norm": 0.2742616033755274, - "acc_norm_stderr": 0.029041333510598035 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.27053455019556716, - "acc_stderr": 0.01134599674353926, - 
"acc_norm": 0.27053455019556716, - "acc_norm_stderr": 0.01134599674353926 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.28921568627450983, - "acc_stderr": 0.03182231867647554, - "acc_norm": 0.28921568627450983, - "acc_norm_stderr": 0.03182231867647554 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.2545454545454545, - "acc_stderr": 0.03401506715249039, - "acc_norm": 0.2545454545454545, - "acc_norm_stderr": 0.03401506715249039 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.21909424724602203, - "mc1_stderr": 0.014480038578757447, - "mc2": NaN, - "mc2_stderr": NaN - }, - "harness|ko_commongen_v2|2": { - "acc": 0.0892018779342723, - "acc_stderr": 0.009770871054319058, - "acc_norm": 0.22300469483568075, - "acc_norm_stderr": 0.014269258984221392 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - "harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": 
"yeen214/llama2_7b_small_tuning_v1", - "model_sha": "3f9b43b4db2da4fe3785071dd52c9fc92aa0801d", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json b/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json deleted file mode 100644 index 3bab4c8e71b279645c987024b6a39123384c4703..0000000000000000000000000000000000000000 --- a/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.2713310580204778, - "acc_stderr": 0.012993807727545787, - "acc_norm": 0.310580204778157, - "acc_norm_stderr": 0.013522292098053055 - }, - "harness|ko_hellaswag|10": { - "acc": 0.3331009759012149, - "acc_stderr": 0.004703590558552501, - "acc_norm": 0.41127265484963155, - "acc_norm_stderr": 0.004910588449330016 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.47953216374269003, - "acc_stderr": 0.038316105328219316, - "acc_norm": 0.47953216374269003, - "acc_norm_stderr": 0.038316105328219316 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.3106796116504854, - "acc_stderr": 0.045821241601615506, - "acc_norm": 0.3106796116504854, - "acc_norm_stderr": 0.045821241601615506 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.36909323116219667, - "acc_stderr": 0.017256283109124613, - "acc_norm": 0.36909323116219667, - "acc_norm_stderr": 0.017256283109124613 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.3037037037037037, - "acc_stderr": 0.03972552884785138, - "acc_norm": 0.3037037037037037, - "acc_norm_stderr": 0.03972552884785138 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.3574468085106383, - "acc_stderr": 0.03132941789476425, - "acc_norm": 0.3574468085106383, - "acc_norm_stderr": 0.03132941789476425 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.25903614457831325, - "acc_stderr": 0.03410646614071857, - "acc_norm": 0.25903614457831325, - "acc_norm_stderr": 0.03410646614071857 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.3279742765273312, - "acc_stderr": 0.02666441088693762, - "acc_norm": 0.3279742765273312, - "acc_norm_stderr": 0.02666441088693762 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.35874439461883406, - "acc_stderr": 0.032190792004199956, - "acc_norm": 0.35874439461883406, - "acc_norm_stderr": 0.032190792004199956 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.366412213740458, - "acc_stderr": 0.04225875451969638, - "acc_norm": 0.366412213740458, - "acc_norm_stderr": 0.04225875451969638 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.31, - "acc_stderr": 0.04648231987117316, - "acc_norm": 0.31, - "acc_norm_stderr": 0.04648231987117316 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.3282828282828283, - "acc_stderr": 0.03345678422756777, - "acc_norm": 0.3282828282828283, - "acc_norm_stderr": 0.03345678422756777 - }, - "harness|ko_mmlu_electrical_engineering|5": { - "acc": 0.3586206896551724, - "acc_stderr": 0.039966295748767186, - "acc_norm": 0.3586206896551724, - "acc_norm_stderr": 0.039966295748767186 - }, - "harness|ko_mmlu_college_physics|5": { - "acc": 0.16666666666666666, - "acc_stderr": 0.03708284662416545, - "acc_norm": 0.16666666666666666, - "acc_norm_stderr": 
0.03708284662416545 - }, - "harness|ko_mmlu_high_school_microeconomics|5": { - "acc": 0.3235294117647059, - "acc_stderr": 0.030388353551886845, - "acc_norm": 0.3235294117647059, - "acc_norm_stderr": 0.030388353551886845 - }, - "harness|ko_mmlu_high_school_macroeconomics|5": { - "acc": 0.30512820512820515, - "acc_stderr": 0.023346335293325887, - "acc_norm": 0.30512820512820515, - "acc_norm_stderr": 0.023346335293325887 - }, - "harness|ko_mmlu_computer_security|5": { - "acc": 0.38, - "acc_stderr": 0.048783173121456316, - "acc_norm": 0.38, - "acc_norm_stderr": 0.048783173121456316 - }, - "harness|ko_mmlu_global_facts|5": { - "acc": 0.24, - "acc_stderr": 0.042923469599092816, - "acc_norm": 0.24, - "acc_norm_stderr": 0.042923469599092816 - }, - "harness|ko_mmlu_jurisprudence|5": { - "acc": 0.39814814814814814, - "acc_stderr": 0.04732332615978814, - "acc_norm": 0.39814814814814814, - "acc_norm_stderr": 0.04732332615978814 - }, - "harness|ko_mmlu_high_school_chemistry|5": { - "acc": 0.33497536945812806, - "acc_stderr": 0.033208527423483104, - "acc_norm": 0.33497536945812806, - "acc_norm_stderr": 0.033208527423483104 - }, - "harness|ko_mmlu_high_school_biology|5": { - "acc": 0.32903225806451614, - "acc_stderr": 0.02672949906834997, - "acc_norm": 0.32903225806451614, - "acc_norm_stderr": 0.02672949906834997 - }, - "harness|ko_mmlu_marketing|5": { - "acc": 0.49145299145299143, - "acc_stderr": 0.032751303000970296, - "acc_norm": 0.49145299145299143, - "acc_norm_stderr": 0.032751303000970296 - }, - "harness|ko_mmlu_clinical_knowledge|5": { - "acc": 0.3018867924528302, - "acc_stderr": 0.028254200344438676, - "acc_norm": 0.3018867924528302, - "acc_norm_stderr": 0.028254200344438676 - }, - "harness|ko_mmlu_public_relations|5": { - "acc": 0.35454545454545455, - "acc_stderr": 0.04582004841505415, - "acc_norm": 0.35454545454545455, - "acc_norm_stderr": 0.04582004841505415 - }, - "harness|ko_mmlu_high_school_mathematics|5": { - "acc": 0.2814814814814815, - "acc_stderr": 0.027420019350945273, - "acc_norm": 0.2814814814814815, - "acc_norm_stderr": 0.027420019350945273 - }, - "harness|ko_mmlu_high_school_physics|5": { - "acc": 0.2119205298013245, - "acc_stderr": 0.033367670865679766, - "acc_norm": 0.2119205298013245, - "acc_norm_stderr": 0.033367670865679766 - }, - "harness|ko_mmlu_sociology|5": { - "acc": 0.4577114427860697, - "acc_stderr": 0.03522865864099597, - "acc_norm": 0.4577114427860697, - "acc_norm_stderr": 0.03522865864099597 - }, - "harness|ko_mmlu_college_medicine|5": { - "acc": 0.26011560693641617, - "acc_stderr": 0.03345036916788991, - "acc_norm": 0.26011560693641617, - "acc_norm_stderr": 0.03345036916788991 - }, - "harness|ko_mmlu_elementary_mathematics|5": { - "acc": 0.2751322751322751, - "acc_stderr": 0.02300008685906864, - "acc_norm": 0.2751322751322751, - "acc_norm_stderr": 0.02300008685906864 - }, - "harness|ko_mmlu_college_biology|5": { - "acc": 0.2847222222222222, - "acc_stderr": 0.03773809990686935, - "acc_norm": 0.2847222222222222, - "acc_norm_stderr": 0.03773809990686935 - }, - "harness|ko_mmlu_college_chemistry|5": { - "acc": 0.34, - "acc_stderr": 0.04760952285695235, - "acc_norm": 0.34, - "acc_norm_stderr": 0.04760952285695235 - }, - "harness|ko_mmlu_us_foreign_policy|5": { - "acc": 0.49, - "acc_stderr": 0.05024183937956913, - "acc_norm": 0.49, - "acc_norm_stderr": 0.05024183937956913 - }, - "harness|ko_mmlu_moral_disputes|5": { - "acc": 0.37283236994219654, - "acc_stderr": 0.026033890613576288, - "acc_norm": 0.37283236994219654, - "acc_norm_stderr": 0.026033890613576288 - }, - 
"harness|ko_mmlu_logical_fallacies|5": { - "acc": 0.2822085889570552, - "acc_stderr": 0.03536117886664742, - "acc_norm": 0.2822085889570552, - "acc_norm_stderr": 0.03536117886664742 - }, - "harness|ko_mmlu_prehistory|5": { - "acc": 0.36728395061728397, - "acc_stderr": 0.02682280175950789, - "acc_norm": 0.36728395061728397, - "acc_norm_stderr": 0.02682280175950789 - }, - "harness|ko_mmlu_college_mathematics|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_government_and_politics|5": { - "acc": 0.35751295336787564, - "acc_stderr": 0.03458816042181007, - "acc_norm": 0.35751295336787564, - "acc_norm_stderr": 0.03458816042181007 - }, - "harness|ko_mmlu_econometrics|5": { - "acc": 0.23684210526315788, - "acc_stderr": 0.039994238792813365, - "acc_norm": 0.23684210526315788, - "acc_norm_stderr": 0.039994238792813365 - }, - "harness|ko_mmlu_high_school_psychology|5": { - "acc": 0.28807339449541286, - "acc_stderr": 0.019416445892636018, - "acc_norm": 0.28807339449541286, - "acc_norm_stderr": 0.019416445892636018 - }, - "harness|ko_mmlu_formal_logic|5": { - "acc": 0.29365079365079366, - "acc_stderr": 0.04073524322147126, - "acc_norm": 0.29365079365079366, - "acc_norm_stderr": 0.04073524322147126 - }, - "harness|ko_mmlu_nutrition|5": { - "acc": 0.38235294117647056, - "acc_stderr": 0.027826109307283683, - "acc_norm": 0.38235294117647056, - "acc_norm_stderr": 0.027826109307283683 - }, - "harness|ko_mmlu_business_ethics|5": { - "acc": 0.46, - "acc_stderr": 0.05009082659620332, - "acc_norm": 0.46, - "acc_norm_stderr": 0.05009082659620332 - }, - "harness|ko_mmlu_international_law|5": { - "acc": 0.5206611570247934, - "acc_stderr": 0.04560456086387235, - "acc_norm": 0.5206611570247934, - "acc_norm_stderr": 0.04560456086387235 - }, - "harness|ko_mmlu_astronomy|5": { - "acc": 0.3223684210526316, - "acc_stderr": 0.03803510248351586, - "acc_norm": 0.3223684210526316, - "acc_norm_stderr": 0.03803510248351586 - }, - "harness|ko_mmlu_professional_psychology|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.018690850273595284, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.018690850273595284 - }, - "harness|ko_mmlu_professional_accounting|5": { - "acc": 0.2872340425531915, - "acc_stderr": 0.026992199173064356, - "acc_norm": 0.2872340425531915, - "acc_norm_stderr": 0.026992199173064356 - }, - "harness|ko_mmlu_machine_learning|5": { - "acc": 0.3125, - "acc_stderr": 0.043994650575715215, - "acc_norm": 0.3125, - "acc_norm_stderr": 0.043994650575715215 - }, - "harness|ko_mmlu_high_school_statistics|5": { - "acc": 0.37962962962962965, - "acc_stderr": 0.03309682581119035, - "acc_norm": 0.37962962962962965, - "acc_norm_stderr": 0.03309682581119035 - }, - "harness|ko_mmlu_moral_scenarios|5": { - "acc": 0.2435754189944134, - "acc_stderr": 0.01435591196476786, - "acc_norm": 0.2435754189944134, - "acc_norm_stderr": 0.01435591196476786 - }, - "harness|ko_mmlu_college_computer_science|5": { - "acc": 0.3, - "acc_stderr": 0.046056618647183814, - "acc_norm": 0.3, - "acc_norm_stderr": 0.046056618647183814 - }, - "harness|ko_mmlu_high_school_computer_science|5": { - "acc": 0.33, - "acc_stderr": 0.047258156262526045, - "acc_norm": 0.33, - "acc_norm_stderr": 0.047258156262526045 - }, - "harness|ko_mmlu_professional_medicine|5": { - "acc": 0.22426470588235295, - "acc_stderr": 0.025336848563332386, - "acc_norm": 0.22426470588235295, - "acc_norm_stderr": 0.025336848563332386 - }, - "harness|ko_mmlu_security_studies|5": { - 
"acc": 0.39591836734693875, - "acc_stderr": 0.03130802899065685, - "acc_norm": 0.39591836734693875, - "acc_norm_stderr": 0.03130802899065685 - }, - "harness|ko_mmlu_high_school_world_history|5": { - "acc": 0.35864978902953587, - "acc_stderr": 0.031219569445301854, - "acc_norm": 0.35864978902953587, - "acc_norm_stderr": 0.031219569445301854 - }, - "harness|ko_mmlu_professional_law|5": { - "acc": 0.2685788787483703, - "acc_stderr": 0.011320056629121734, - "acc_norm": 0.2685788787483703, - "acc_norm_stderr": 0.011320056629121734 - }, - "harness|ko_mmlu_high_school_us_history|5": { - "acc": 0.3088235294117647, - "acc_stderr": 0.03242661719827218, - "acc_norm": 0.3088235294117647, - "acc_norm_stderr": 0.03242661719827218 - }, - "harness|ko_mmlu_high_school_european_history|5": { - "acc": 0.3393939393939394, - "acc_stderr": 0.036974422050315967, - "acc_norm": 0.3393939393939394, - "acc_norm_stderr": 0.036974422050315967 - }, - "harness|ko_truthfulqa_mc|0": { - "mc1": 0.2729498164014688, - "mc1_stderr": 0.015594753632006516, - "mc2": 0.4392204501367092, - "mc2_stderr": 0.01533439619345391 - }, - "harness|ko_commongen_v2|2": { - "acc": 0.4413145539906103, - "acc_stderr": 0.01702131167184747, - "acc_norm": 0.5, - "acc_norm_stderr": 0.017139779254776524 - } - }, - "versions": { - "all": 0, - "harness|ko_arc_challenge|25": 0, - "harness|ko_hellaswag|10": 0, - "harness|ko_mmlu_world_religions|5": 1, - "harness|ko_mmlu_management|5": 1, - "harness|ko_mmlu_miscellaneous|5": 1, - "harness|ko_mmlu_anatomy|5": 1, - "harness|ko_mmlu_abstract_algebra|5": 1, - "harness|ko_mmlu_conceptual_physics|5": 1, - "harness|ko_mmlu_virology|5": 1, - "harness|ko_mmlu_philosophy|5": 1, - "harness|ko_mmlu_human_aging|5": 1, - "harness|ko_mmlu_human_sexuality|5": 1, - "harness|ko_mmlu_medical_genetics|5": 1, - "harness|ko_mmlu_high_school_geography|5": 1, - "harness|ko_mmlu_electrical_engineering|5": 1, - "harness|ko_mmlu_college_physics|5": 1, - "harness|ko_mmlu_high_school_microeconomics|5": 1, - "harness|ko_mmlu_high_school_macroeconomics|5": 1, - "harness|ko_mmlu_computer_security|5": 1, - "harness|ko_mmlu_global_facts|5": 1, - "harness|ko_mmlu_jurisprudence|5": 1, - "harness|ko_mmlu_high_school_chemistry|5": 1, - "harness|ko_mmlu_high_school_biology|5": 1, - "harness|ko_mmlu_marketing|5": 1, - "harness|ko_mmlu_clinical_knowledge|5": 1, - "harness|ko_mmlu_public_relations|5": 1, - "harness|ko_mmlu_high_school_mathematics|5": 1, - "harness|ko_mmlu_high_school_physics|5": 1, - "harness|ko_mmlu_sociology|5": 1, - "harness|ko_mmlu_college_medicine|5": 1, - "harness|ko_mmlu_elementary_mathematics|5": 1, - "harness|ko_mmlu_college_biology|5": 1, - "harness|ko_mmlu_college_chemistry|5": 1, - "harness|ko_mmlu_us_foreign_policy|5": 1, - "harness|ko_mmlu_moral_disputes|5": 1, - "harness|ko_mmlu_logical_fallacies|5": 1, - "harness|ko_mmlu_prehistory|5": 1, - "harness|ko_mmlu_college_mathematics|5": 1, - "harness|ko_mmlu_high_school_government_and_politics|5": 1, - "harness|ko_mmlu_econometrics|5": 1, - "harness|ko_mmlu_high_school_psychology|5": 1, - "harness|ko_mmlu_formal_logic|5": 1, - "harness|ko_mmlu_nutrition|5": 1, - "harness|ko_mmlu_business_ethics|5": 1, - "harness|ko_mmlu_international_law|5": 1, - "harness|ko_mmlu_astronomy|5": 1, - "harness|ko_mmlu_professional_psychology|5": 1, - "harness|ko_mmlu_professional_accounting|5": 1, - "harness|ko_mmlu_machine_learning|5": 1, - "harness|ko_mmlu_high_school_statistics|5": 1, - "harness|ko_mmlu_moral_scenarios|5": 1, - "harness|ko_mmlu_college_computer_science|5": 1, - 
"harness|ko_mmlu_high_school_computer_science|5": 1, - "harness|ko_mmlu_professional_medicine|5": 1, - "harness|ko_mmlu_security_studies|5": 1, - "harness|ko_mmlu_high_school_world_history|5": 1, - "harness|ko_mmlu_professional_law|5": 1, - "harness|ko_mmlu_high_school_us_history|5": 1, - "harness|ko_mmlu_high_school_european_history|5": 1, - "harness|ko_truthfulqa_mc|0": 0, - "harness|ko_commongen_v2|2": 1 - }, - "config_general": { - "model_name": "yeen214/test_llama2_7b", - "model_sha": "69a4886f51ed752216cdd7f41a584d14240126f9", - "model_dtype": "torch.float16", - "lighteval_sha": "", - "num_few_shot_default": 0, - "num_fewshot_seeds": 1, - "override_batch_size": 1, - "max_samples": null - } -} \ No newline at end of file diff --git a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json deleted file mode 100644 index b6d9bc73b1fd5325c9bfab9c60113587f841b41c..0000000000000000000000000000000000000000 --- a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json +++ /dev/null @@ -1,444 +0,0 @@ -{ - "results": { - "harness|ko_arc_challenge|25": { - "acc": 0.21416382252559726, - "acc_stderr": 0.011988383205966496, - "acc_norm": 0.257679180887372, - "acc_norm_stderr": 0.012780770562768409 - }, - "harness|ko_hellaswag|10": { - "acc": 0.2524397530372436, - "acc_stderr": 0.004335243434486834, - "acc_norm": 0.25323640709022105, - "acc_norm_stderr": 0.004339764434219064 - }, - "harness|ko_mmlu_world_religions|5": { - "acc": 0.30994152046783624, - "acc_stderr": 0.03546976959393163, - "acc_norm": 0.30994152046783624, - "acc_norm_stderr": 0.03546976959393163 - }, - "harness|ko_mmlu_management|5": { - "acc": 0.22330097087378642, - "acc_stderr": 0.04123553189891431, - "acc_norm": 0.22330097087378642, - "acc_norm_stderr": 0.04123553189891431 - }, - "harness|ko_mmlu_miscellaneous|5": { - "acc": 0.24265644955300128, - "acc_stderr": 0.015329888940899894, - "acc_norm": 0.24265644955300128, - "acc_norm_stderr": 0.015329888940899894 - }, - "harness|ko_mmlu_anatomy|5": { - "acc": 0.28888888888888886, - "acc_stderr": 0.03915450630414251, - "acc_norm": 0.28888888888888886, - "acc_norm_stderr": 0.03915450630414251 - }, - "harness|ko_mmlu_abstract_algebra|5": { - "acc": 0.28, - "acc_stderr": 0.04512608598542128, - "acc_norm": 0.28, - "acc_norm_stderr": 0.04512608598542128 - }, - "harness|ko_mmlu_conceptual_physics|5": { - "acc": 0.2170212765957447, - "acc_stderr": 0.026947483121496228, - "acc_norm": 0.2170212765957447, - "acc_norm_stderr": 0.026947483121496228 - }, - "harness|ko_mmlu_virology|5": { - "acc": 0.2289156626506024, - "acc_stderr": 0.03270745277352477, - "acc_norm": 0.2289156626506024, - "acc_norm_stderr": 0.03270745277352477 - }, - "harness|ko_mmlu_philosophy|5": { - "acc": 0.2829581993569132, - "acc_stderr": 0.025583062489984824, - "acc_norm": 0.2829581993569132, - "acc_norm_stderr": 0.025583062489984824 - }, - "harness|ko_mmlu_human_aging|5": { - "acc": 0.21973094170403587, - "acc_stderr": 0.027790177064383602, - "acc_norm": 0.21973094170403587, - "acc_norm_stderr": 0.027790177064383602 - }, - "harness|ko_mmlu_human_sexuality|5": { - "acc": 0.20610687022900764, - "acc_stderr": 0.03547771004159462, - "acc_norm": 0.20610687022900764, - "acc_norm_stderr": 0.03547771004159462 - }, - "harness|ko_mmlu_medical_genetics|5": { - "acc": 0.32, - "acc_stderr": 0.04688261722621504, - "acc_norm": 0.32, - "acc_norm_stderr": 0.04688261722621504 - }, - "harness|ko_mmlu_high_school_geography|5": { - "acc": 0.25252525252525254, - "acc_stderr": 
diff --git a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json
deleted file mode 100644
index b6d9bc73b1fd5325c9bfab9c60113587f841b41c..0000000000000000000000000000000000000000
--- a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json
+++ /dev/null
@@ -1,444 +0,0 @@
-{
- "results": {
- "harness|ko_arc_challenge|25": {
- "acc": 0.21416382252559726,
- "acc_stderr": 0.011988383205966496,
- "acc_norm": 0.257679180887372,
- "acc_norm_stderr": 0.012780770562768409
- },
- "harness|ko_hellaswag|10": {
- "acc": 0.2524397530372436,
- "acc_stderr": 0.004335243434486834,
- "acc_norm": 0.25323640709022105,
- "acc_norm_stderr": 0.004339764434219064
- },
- "harness|ko_mmlu_world_religions|5": {
- "acc": 0.30994152046783624,
- "acc_stderr": 0.03546976959393163,
- "acc_norm": 0.30994152046783624,
- "acc_norm_stderr": 0.03546976959393163
- },
- "harness|ko_mmlu_management|5": {
- "acc": 0.22330097087378642,
- "acc_stderr": 0.04123553189891431,
- "acc_norm": 0.22330097087378642,
- "acc_norm_stderr": 0.04123553189891431
- },
- "harness|ko_mmlu_miscellaneous|5": {
- "acc": 0.24265644955300128,
- "acc_stderr": 0.015329888940899894,
- "acc_norm": 0.24265644955300128,
- "acc_norm_stderr": 0.015329888940899894
- },
- "harness|ko_mmlu_anatomy|5": {
- "acc": 0.28888888888888886,
- "acc_stderr": 0.03915450630414251,
- "acc_norm": 0.28888888888888886,
- "acc_norm_stderr": 0.03915450630414251
- },
- "harness|ko_mmlu_abstract_algebra|5": {
- "acc": 0.28,
- "acc_stderr": 0.04512608598542128,
- "acc_norm": 0.28,
- "acc_norm_stderr": 0.04512608598542128
- },
- "harness|ko_mmlu_conceptual_physics|5": {
- "acc": 0.2170212765957447,
- "acc_stderr": 0.026947483121496228,
- "acc_norm": 0.2170212765957447,
- "acc_norm_stderr": 0.026947483121496228
- },
- "harness|ko_mmlu_virology|5": {
- "acc": 0.2289156626506024,
- "acc_stderr": 0.03270745277352477,
- "acc_norm": 0.2289156626506024,
- "acc_norm_stderr": 0.03270745277352477
- },
- "harness|ko_mmlu_philosophy|5": {
- "acc": 0.2829581993569132,
- "acc_stderr": 0.025583062489984824,
- "acc_norm": 0.2829581993569132,
- "acc_norm_stderr": 0.025583062489984824
- },
- "harness|ko_mmlu_human_aging|5": {
- "acc": 0.21973094170403587,
- "acc_stderr": 0.027790177064383602,
- "acc_norm": 0.21973094170403587,
- "acc_norm_stderr": 0.027790177064383602
- },
- "harness|ko_mmlu_human_sexuality|5": {
- "acc": 0.20610687022900764,
- "acc_stderr": 0.03547771004159462,
- "acc_norm": 0.20610687022900764,
- "acc_norm_stderr": 0.03547771004159462
- },
- "harness|ko_mmlu_medical_genetics|5": {
- "acc": 0.32,
- "acc_stderr": 0.04688261722621504,
- "acc_norm": 0.32,
- "acc_norm_stderr": 0.04688261722621504
- },
- "harness|ko_mmlu_high_school_geography|5": {
- "acc": 0.25252525252525254,
- "acc_stderr": 0.030954055470365914,
- "acc_norm": 0.25252525252525254,
- "acc_norm_stderr": 0.030954055470365914
- },
- "harness|ko_mmlu_electrical_engineering|5": {
- "acc": 0.2896551724137931,
- "acc_stderr": 0.03780019230438014,
- "acc_norm": 0.2896551724137931,
- "acc_norm_stderr": 0.03780019230438014
- },
- "harness|ko_mmlu_college_physics|5": {
- "acc": 0.2549019607843137,
- "acc_stderr": 0.04336432707993177,
- "acc_norm": 0.2549019607843137,
- "acc_norm_stderr": 0.04336432707993177
- },
- "harness|ko_mmlu_high_school_microeconomics|5": {
- "acc": 0.22268907563025211,
- "acc_stderr": 0.027025433498882392,
- "acc_norm": 0.22268907563025211,
- "acc_norm_stderr": 0.027025433498882392
- },
- "harness|ko_mmlu_high_school_macroeconomics|5": {
- "acc": 0.2564102564102564,
- "acc_stderr": 0.022139081103971545,
- "acc_norm": 0.2564102564102564,
- "acc_norm_stderr": 0.022139081103971545
- },
- "harness|ko_mmlu_computer_security|5": {
- "acc": 0.25,
- "acc_stderr": 0.04351941398892446,
- "acc_norm": 0.25,
- "acc_norm_stderr": 0.04351941398892446
- },
- "harness|ko_mmlu_global_facts|5": {
- "acc": 0.19,
- "acc_stderr": 0.03942772444036623,
- "acc_norm": 0.19,
- "acc_norm_stderr": 0.03942772444036623
- },
- "harness|ko_mmlu_jurisprudence|5": {
- "acc": 0.2962962962962963,
- "acc_stderr": 0.04414343666854932,
- "acc_norm": 0.2962962962962963,
- "acc_norm_stderr": 0.04414343666854932
- },
- "harness|ko_mmlu_high_school_chemistry|5": {
- "acc": 0.22167487684729065,
- "acc_stderr": 0.029225575892489614,
- "acc_norm": 0.22167487684729065,
- "acc_norm_stderr": 0.029225575892489614
- },
- "harness|ko_mmlu_high_school_biology|5": {
- "acc": 0.25161290322580643,
- "acc_stderr": 0.02468597928623996,
- "acc_norm": 0.25161290322580643,
- "acc_norm_stderr": 0.02468597928623996
- },
- "harness|ko_mmlu_marketing|5": {
- "acc": 0.2777777777777778,
- "acc_stderr": 0.029343114798094472,
- "acc_norm": 0.2777777777777778,
- "acc_norm_stderr": 0.029343114798094472
- },
- "harness|ko_mmlu_clinical_knowledge|5": {
- "acc": 0.24528301886792453,
- "acc_stderr": 0.026480357179895702,
- "acc_norm": 0.24528301886792453,
- "acc_norm_stderr": 0.026480357179895702
- },
- "harness|ko_mmlu_public_relations|5": {
- "acc": 0.17272727272727273,
- "acc_stderr": 0.03620691833929219,
- "acc_norm": 0.17272727272727273,
- "acc_norm_stderr": 0.03620691833929219
- },
- "harness|ko_mmlu_high_school_mathematics|5": {
- "acc": 0.26296296296296295,
- "acc_stderr": 0.02684205787383371,
- "acc_norm": 0.26296296296296295,
- "acc_norm_stderr": 0.02684205787383371
- },
- "harness|ko_mmlu_high_school_physics|5": {
- "acc": 0.24503311258278146,
- "acc_stderr": 0.03511807571804724,
- "acc_norm": 0.24503311258278146,
- "acc_norm_stderr": 0.03511807571804724
- },
- "harness|ko_mmlu_sociology|5": {
- "acc": 0.23383084577114427,
- "acc_stderr": 0.02992941540834838,
- "acc_norm": 0.23383084577114427,
- "acc_norm_stderr": 0.02992941540834838
- },
- "harness|ko_mmlu_college_medicine|5": {
- "acc": 0.2658959537572254,
- "acc_stderr": 0.03368762932259431,
- "acc_norm": 0.2658959537572254,
- "acc_norm_stderr": 0.03368762932259431
- },
- "harness|ko_mmlu_elementary_mathematics|5": {
- "acc": 0.2222222222222222,
- "acc_stderr": 0.02141168439369418,
- "acc_norm": 0.2222222222222222,
- "acc_norm_stderr": 0.02141168439369418
- },
- "harness|ko_mmlu_college_biology|5": {
- "acc": 0.2777777777777778,
- "acc_stderr": 0.03745554791462457,
- "acc_norm": 0.2777777777777778,
- "acc_norm_stderr": 0.03745554791462457
- },
- "harness|ko_mmlu_college_chemistry|5": {
- "acc": 0.33,
- "acc_stderr": 0.047258156262526045,
- "acc_norm": 0.33,
- "acc_norm_stderr": 0.047258156262526045
- },
- "harness|ko_mmlu_us_foreign_policy|5": {
- "acc": 0.24,
- "acc_stderr": 0.04292346959909284,
- "acc_norm": 0.24,
- "acc_norm_stderr": 0.04292346959909284
- },
- "harness|ko_mmlu_moral_disputes|5": {
- "acc": 0.2543352601156069,
- "acc_stderr": 0.02344582627654554,
- "acc_norm": 0.2543352601156069,
- "acc_norm_stderr": 0.02344582627654554
- },
- "harness|ko_mmlu_logical_fallacies|5": {
- "acc": 0.2331288343558282,
- "acc_stderr": 0.0332201579577674,
- "acc_norm": 0.2331288343558282,
- "acc_norm_stderr": 0.0332201579577674
- },
- "harness|ko_mmlu_prehistory|5": {
- "acc": 0.2623456790123457,
- "acc_stderr": 0.02447722285613511,
- "acc_norm": 0.2623456790123457,
- "acc_norm_stderr": 0.02447722285613511
- },
- "harness|ko_mmlu_college_mathematics|5": {
- "acc": 0.28,
- "acc_stderr": 0.04512608598542127,
- "acc_norm": 0.28,
- "acc_norm_stderr": 0.04512608598542127
- },
- "harness|ko_mmlu_high_school_government_and_politics|5": {
- "acc": 0.30569948186528495,
- "acc_stderr": 0.033248379397581594,
- "acc_norm": 0.30569948186528495,
- "acc_norm_stderr": 0.033248379397581594
- },
- "harness|ko_mmlu_econometrics|5": {
- "acc": 0.24561403508771928,
- "acc_stderr": 0.04049339297748141,
- "acc_norm": 0.24561403508771928,
- "acc_norm_stderr": 0.04049339297748141
- },
- "harness|ko_mmlu_high_school_psychology|5": {
- "acc": 0.21834862385321102,
- "acc_stderr": 0.017712600528722727,
- "acc_norm": 0.21834862385321102,
- "acc_norm_stderr": 0.017712600528722727
- },
- "harness|ko_mmlu_formal_logic|5": {
- "acc": 0.23015873015873015,
- "acc_stderr": 0.037649508797906045,
- "acc_norm": 0.23015873015873015,
- "acc_norm_stderr": 0.037649508797906045
- },
- "harness|ko_mmlu_nutrition|5": {
- "acc": 0.22549019607843138,
- "acc_stderr": 0.023929155517351298,
- "acc_norm": 0.22549019607843138,
- "acc_norm_stderr": 0.023929155517351298
- },
- "harness|ko_mmlu_business_ethics|5": {
- "acc": 0.18,
- "acc_stderr": 0.038612291966536955,
- "acc_norm": 0.18,
- "acc_norm_stderr": 0.038612291966536955
- },
- "harness|ko_mmlu_international_law|5": {
- "acc": 0.256198347107438,
- "acc_stderr": 0.03984979653302871,
- "acc_norm": 0.256198347107438,
- "acc_norm_stderr": 0.03984979653302871
- },
- "harness|ko_mmlu_astronomy|5": {
- "acc": 0.25,
- "acc_stderr": 0.03523807393012047,
- "acc_norm": 0.25,
- "acc_norm_stderr": 0.03523807393012047
- },
- "harness|ko_mmlu_professional_psychology|5": {
- "acc": 0.2630718954248366,
- "acc_stderr": 0.017812676542320657,
- "acc_norm": 0.2630718954248366,
- "acc_norm_stderr": 0.017812676542320657
- },
- "harness|ko_mmlu_professional_accounting|5": {
- "acc": 0.2553191489361702,
- "acc_stderr": 0.02601199293090203,
- "acc_norm": 0.2553191489361702,
- "acc_norm_stderr": 0.02601199293090203
- },
- "harness|ko_mmlu_machine_learning|5": {
- "acc": 0.30357142857142855,
- "acc_stderr": 0.04364226155841044,
- "acc_norm": 0.30357142857142855,
- "acc_norm_stderr": 0.04364226155841044
- },
- "harness|ko_mmlu_high_school_statistics|5": {
- "acc": 0.30092592592592593,
- "acc_stderr": 0.031280390843298825,
- "acc_norm": 0.30092592592592593,
- "acc_norm_stderr": 0.031280390843298825
- },
- "harness|ko_mmlu_moral_scenarios|5": {
- "acc": 0.2536312849162011,
- "acc_stderr": 0.014551553659369916,
- "acc_norm": 0.2536312849162011,
- "acc_norm_stderr": 0.014551553659369916
- },
- "harness|ko_mmlu_college_computer_science|5": {
- "acc": 0.26,
- "acc_stderr": 0.04408440022768079,
- "acc_norm": 0.26,
- "acc_norm_stderr": 0.04408440022768079
- },
- "harness|ko_mmlu_high_school_computer_science|5": {
- "acc": 0.3,
- "acc_stderr": 0.046056618647183814,
- "acc_norm": 0.3,
- "acc_norm_stderr": 0.046056618647183814
- },
- "harness|ko_mmlu_professional_medicine|5": {
- "acc": 0.19852941176470587,
- "acc_stderr": 0.024231013370541087,
- "acc_norm": 0.19852941176470587,
- "acc_norm_stderr": 0.024231013370541087
- },
- "harness|ko_mmlu_security_studies|5": {
- "acc": 0.24897959183673468,
- "acc_stderr": 0.02768297952296023,
- "acc_norm": 0.24897959183673468,
- "acc_norm_stderr": 0.02768297952296023
- },
- "harness|ko_mmlu_high_school_world_history|5": {
- "acc": 0.2911392405063291,
- "acc_stderr": 0.029571601065753374,
- "acc_norm": 0.2911392405063291,
- "acc_norm_stderr": 0.029571601065753374
- },
- "harness|ko_mmlu_professional_law|5": {
- "acc": 0.25358539765319427,
- "acc_stderr": 0.011111715336101138,
- "acc_norm": 0.25358539765319427,
- "acc_norm_stderr": 0.011111715336101138
- },
- "harness|ko_mmlu_high_school_us_history|5": {
- "acc": 0.24509803921568626,
- "acc_stderr": 0.03019028245350195,
- "acc_norm": 0.24509803921568626,
- "acc_norm_stderr": 0.03019028245350195
- },
- "harness|ko_mmlu_high_school_european_history|5": {
- "acc": 0.2787878787878788,
- "acc_stderr": 0.03501438706296781,
- "acc_norm": 0.2787878787878788,
- "acc_norm_stderr": 0.03501438706296781
- },
- "harness|ko_truthfulqa_mc|0": {
- "mc1": 0.23623011015911874,
- "mc1_stderr": 0.014869755015871112,
- "mc2": 0.49817574202268433,
- "mc2_stderr": 0.016860322660870557
- },
- "harness|ko_commongen_v2|2": {
- "acc": 0.09859154929577464,
- "acc_stderr": 0.010219175985280587,
- "acc_norm": 0.3955399061032864,
- "acc_norm_stderr": 0.016761550511163865
- }
- },
- "versions": {
- "all": 0,
- "harness|ko_arc_challenge|25": 0,
- "harness|ko_hellaswag|10": 0,
- "harness|ko_mmlu_world_religions|5": 1,
- "harness|ko_mmlu_management|5": 1,
- "harness|ko_mmlu_miscellaneous|5": 1,
- "harness|ko_mmlu_anatomy|5": 1,
- "harness|ko_mmlu_abstract_algebra|5": 1,
- "harness|ko_mmlu_conceptual_physics|5": 1,
- "harness|ko_mmlu_virology|5": 1,
- "harness|ko_mmlu_philosophy|5": 1,
- "harness|ko_mmlu_human_aging|5": 1,
- "harness|ko_mmlu_human_sexuality|5": 1,
- "harness|ko_mmlu_medical_genetics|5": 1,
- "harness|ko_mmlu_high_school_geography|5": 1,
- "harness|ko_mmlu_electrical_engineering|5": 1,
- "harness|ko_mmlu_college_physics|5": 1,
- "harness|ko_mmlu_high_school_microeconomics|5": 1,
- "harness|ko_mmlu_high_school_macroeconomics|5": 1,
- "harness|ko_mmlu_computer_security|5": 1,
- "harness|ko_mmlu_global_facts|5": 1,
- "harness|ko_mmlu_jurisprudence|5": 1,
- "harness|ko_mmlu_high_school_chemistry|5": 1,
- "harness|ko_mmlu_high_school_biology|5": 1,
- "harness|ko_mmlu_marketing|5": 1,
- "harness|ko_mmlu_clinical_knowledge|5": 1,
- "harness|ko_mmlu_public_relations|5": 1,
- "harness|ko_mmlu_high_school_mathematics|5": 1,
- "harness|ko_mmlu_high_school_physics|5": 1,
- "harness|ko_mmlu_sociology|5": 1,
- "harness|ko_mmlu_college_medicine|5": 1,
- "harness|ko_mmlu_elementary_mathematics|5": 1,
- "harness|ko_mmlu_college_biology|5": 1,
- "harness|ko_mmlu_college_chemistry|5": 1,
- "harness|ko_mmlu_us_foreign_policy|5": 1,
- "harness|ko_mmlu_moral_disputes|5": 1,
- "harness|ko_mmlu_logical_fallacies|5": 1,
- "harness|ko_mmlu_prehistory|5": 1,
- "harness|ko_mmlu_college_mathematics|5": 1,
- "harness|ko_mmlu_high_school_government_and_politics|5": 1,
- "harness|ko_mmlu_econometrics|5": 1,
- "harness|ko_mmlu_high_school_psychology|5": 1,
- "harness|ko_mmlu_formal_logic|5": 1,
- "harness|ko_mmlu_nutrition|5": 1,
- "harness|ko_mmlu_business_ethics|5": 1,
- "harness|ko_mmlu_international_law|5": 1,
- "harness|ko_mmlu_astronomy|5": 1,
- "harness|ko_mmlu_professional_psychology|5": 1,
- "harness|ko_mmlu_professional_accounting|5": 1,
- "harness|ko_mmlu_machine_learning|5": 1,
- "harness|ko_mmlu_high_school_statistics|5": 1,
- "harness|ko_mmlu_moral_scenarios|5": 1,
- "harness|ko_mmlu_college_computer_science|5": 1,
- "harness|ko_mmlu_high_school_computer_science|5": 1,
- "harness|ko_mmlu_professional_medicine|5": 1,
- "harness|ko_mmlu_security_studies|5": 1,
- "harness|ko_mmlu_high_school_world_history|5": 1,
- "harness|ko_mmlu_professional_law|5": 1,
- "harness|ko_mmlu_high_school_us_history|5": 1,
- "harness|ko_mmlu_high_school_european_history|5": 1,
- "harness|ko_truthfulqa_mc|0": 0,
- "harness|ko_commongen_v2|2": 1
- },
- "config_general": {
- "model_name": "yeen214/test_llama2_ko_7b",
- "model_sha": "45901e1d6ccb22f5ed8aec3f9dd366823fdd1c33",
- "model_dtype": "torch.float16",
- "lighteval_sha": "",
- "num_few_shot_default": 0,
- "num_fewshot_seeds": 1,
- "override_batch_size": 1,
- "max_samples": null
- }
-}
\ No newline at end of file